authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-09-19 04:57:09 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-09-19 04:57:09 +0000
commit2722609ed8cf1f24bb6a8b8a5ad9d7ac6dec58c3 (patch)
treee0f8becff83e01bc4228b1824e81a6a355d6e439 /tests
parentReleasing progress-linux version 7.3.7-3~progress7.99u1. (diff)
Merging upstream version 7.4.7.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests')
-rw-r--r--  tests/js/fixtures/cpp/searchindex.js | 1
-rw-r--r--  tests/js/fixtures/multiterm/searchindex.js | 1
-rw-r--r--  tests/js/fixtures/partial/searchindex.js | 1
-rw-r--r--  tests/js/fixtures/titles/searchindex.js | 1
-rw-r--r--  tests/js/language_data.js | 26
-rw-r--r--  tests/js/roots/cpp/conf.py | 0
-rw-r--r--  tests/js/roots/cpp/index.rst | 10
-rw-r--r--  tests/js/roots/multiterm/conf.py | 0
-rw-r--r--  tests/js/roots/multiterm/index.rst | 13
-rw-r--r--  tests/js/roots/partial/conf.py | 0
-rw-r--r--  tests/js/roots/partial/index.rst | 9
-rw-r--r--  tests/js/roots/titles/conf.py | 6
-rw-r--r--  tests/js/roots/titles/index.rst | 20
-rw-r--r--  tests/js/roots/titles/relevance.py | 7
-rw-r--r--  tests/js/roots/titles/relevance.rst | 13
-rw-r--r--  tests/js/searchtools.js | 166
-rw-r--r--  tests/roots/test-add_source_parser-conflicts-with-users-setting/conf.py | 5
-rw-r--r--  tests/roots/test-add_source_parser/conf.py | 1
-rw-r--r--  tests/roots/test-autosummary/conf.py | 2
-rw-r--r--  tests/roots/test-build-text/conf.py | 4
-rw-r--r--  tests/roots/test-domain-py-python_maximum_signature_line_length/index.rst | 13
-rw-r--r--  tests/roots/test-domain-py/index.rst | 1
-rw-r--r--  tests/roots/test-domain-py/module.rst | 3
-rw-r--r--  tests/roots/test-domain-py/roles.rst | 6
-rw-r--r--  tests/roots/test-domain-py/type_alias.rst | 15
-rw-r--r--  tests/roots/test-ext-autodoc/conf.py | 3
-rw-r--r--  tests/roots/test-ext-autodoc/target/annotated.py | 36
-rw-r--r--  tests/roots/test-ext-autosummary-import_cycle/conf.py | 7
-rw-r--r--  tests/roots/test-ext-autosummary-import_cycle/index.rst | 6
-rw-r--r--  tests/roots/test-ext-autosummary-import_cycle/spam/__init__.py | 1
-rw-r--r--  tests/roots/test-ext-autosummary-import_cycle/spam/eggs.py | 10
-rw-r--r--  tests/roots/test-ext-autosummary-module_prefix/conf.py | 8
-rw-r--r--  tests/roots/test-ext-autosummary-module_prefix/index.rst | 5
-rw-r--r--  tests/roots/test-ext-autosummary-module_prefix/pkg/__init__.py | 0
-rw-r--r--  tests/roots/test-ext-autosummary-module_prefix/pkg/mod0/__init__.py | 0
-rw-r--r--  tests/roots/test-ext-autosummary-module_prefix/pkg/mod1/__init__.py | 0
-rw-r--r--  tests/roots/test-ext-autosummary/conf.py | 3
-rw-r--r--  tests/roots/test-ext-coverage/conf.py | 5
-rw-r--r--  tests/roots/test-ext-coverage/grog/__init__.py | 0
-rw-r--r--  tests/roots/test-ext-coverage/grog/coverage_ignored.py (renamed from tests/roots/test-ext-coverage/coverage_ignored.py) | 0
-rw-r--r--  tests/roots/test-ext-coverage/grog/coverage_missing.py | 7
-rw-r--r--  tests/roots/test-ext-coverage/grog/coverage_not_ignored.py (renamed from tests/roots/test-ext-coverage/coverage_not_ignored.py) | 0
-rw-r--r--  tests/roots/test-ext-coverage/index.rst | 4
-rw-r--r--  tests/roots/test-ext-doctest-skipif/conf.py | 4
-rw-r--r--  tests/roots/test-ext-doctest/conf.py | 4
-rw-r--r--  tests/roots/test-html_assets/extra/API.html.jinja (renamed from tests/roots/test-html_assets/extra/API.html_t) | 0
-rw-r--r--  tests/roots/test-html_assets/static/API.html.jinja (renamed from tests/roots/test-html_assets/static/API.html_t) | 0
-rw-r--r--  tests/roots/test-images/index.rst | 5
-rw-r--r--  tests/roots/test-inheritance/conf.py | 1
-rw-r--r--  tests/roots/test-intl/conf.py | 4
-rw-r--r--  tests/roots/test-intl/glossary_terms_inconsistency.txt | 1
-rw-r--r--  tests/roots/test-intl/index.txt | 1
-rw-r--r--  tests/roots/test-intl/markup.txt | 6
-rw-r--r--  tests/roots/test-intl/role_xref.txt | 3
-rw-r--r--  tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms_inconsistency.po | 3
-rw-r--r--  tests/roots/test-intl/xx/LC_MESSAGES/literalblock.po | 32
-rw-r--r--  tests/roots/test-intl/xx/LC_MESSAGES/markup.po | 25
-rw-r--r--  tests/roots/test-latex-figure-in-admonition/conf.py | 2
-rw-r--r--  tests/roots/test-latex-figure-in-admonition/index.rst | 19
-rw-r--r--  tests/roots/test-latex-table/_mytemplates/latex/longtable.tex.jinja (renamed from tests/roots/test-latex-table/_mytemplates/latex/longtable.tex_t) | 0
-rw-r--r--  tests/roots/test-latex-table/_mytemplates/latex/tabulary.tex_t | 1
-rw-r--r--  tests/roots/test-linkcheck-anchors-ignore-for-url/index.rst | 1
-rw-r--r--  tests/roots/test-markup-rubric/conf.py | 1
-rw-r--r--  tests/roots/test-markup-rubric/index.rst | 32
-rw-r--r--  tests/roots/test-root/conf.py | 7
-rw-r--r--  tests/roots/test-root/images.txt | 8
-rw-r--r--  tests/roots/test-root/markup.txt | 13
-rw-r--r--  tests/roots/test-templating/conf.py | 4
-rw-r--r--  tests/roots/test-theming/test_theme/staticfiles/static/legacytmpl.html_t | 2
-rw-r--r--  tests/roots/test-theming/test_theme/staticfiles/static/statictmpl.html.jinja (renamed from tests/roots/test-theming/test_theme/staticfiles/static/statictmpl.html_t) | 0
-rw-r--r--  tests/roots/test-toctree-domain-objects/document_scoping.rst | 23
-rw-r--r--  tests/roots/test-toctree-domain-objects/index.rst | 1
-rw-r--r--  tests/roots/test-util-copyasset_overwrite/conf.py | 7
-rw-r--r--  tests/roots/test-util-copyasset_overwrite/index.rst | 0
-rw-r--r--  tests/roots/test-util-copyasset_overwrite/myext.py | 22
-rw-r--r--  tests/roots/test-util-copyasset_overwrite/myext_static/custom-styles.css | 1
-rw-r--r--  tests/roots/test-util-copyasset_overwrite/user_static/custom-styles.css | 1
-rw-r--r--  tests/roots/test-versioning/conf.py | 4
-rw-r--r--  tests/test_addnodes.py | 2
-rw-r--r--  tests/test_application.py | 2
-rw-r--r--  tests/test_builders/test_build.py | 2
-rw-r--r--  tests/test_builders/test_build_dirhtml.py | 8
-rw-r--r--  tests/test_builders/test_build_epub.py | 11
-rw-r--r--  tests/test_builders/test_build_gettext.py | 51
-rw-r--r--  tests/test_builders/test_build_html.py | 77
-rw-r--r--  tests/test_builders/test_build_html_5_output.py | 33
-rw-r--r--  tests/test_builders/test_build_html_assets.py | 2
-rw-r--r--  tests/test_builders/test_build_html_download.py | 1
-rw-r--r--  tests/test_builders/test_build_html_image.py | 5
-rw-r--r--  tests/test_builders/test_build_latex.py | 166
-rw-r--r--  tests/test_builders/test_build_linkcheck.py | 235
-rw-r--r--  tests/test_builders/test_build_manpage.py | 10
-rw-r--r--  tests/test_builders/test_build_texinfo.py | 10
-rw-r--r--  tests/test_config/test_config.py | 18
-rw-r--r--  tests/test_directives/test_directive_code.py | 16
-rw-r--r--  tests/test_directives/test_directive_other.py | 12
-rw-r--r--  tests/test_domains/test_domain_cpp.py | 108
-rw-r--r--  tests/test_domains/test_domain_py.py | 80
-rw-r--r--  tests/test_domains/test_domain_py_pyobject.py | 71
-rw-r--r--  tests/test_environment/test_environment.py | 2
-rw-r--r--  tests/test_environment/test_environment_indexentries.py | 86
-rw-r--r--  tests/test_environment/test_environment_toctree.py | 37
-rw-r--r--  tests/test_extensions/test_ext_apidoc.py | 25
-rw-r--r--  tests/test_extensions/test_ext_autodoc.py | 54
-rw-r--r--  tests/test_extensions/test_ext_autodoc_automodule.py | 16
-rw-r--r--  tests/test_extensions/test_ext_autodoc_configs.py | 31
-rw-r--r--  tests/test_extensions/test_ext_autosummary.py | 38
-rw-r--r--  tests/test_extensions/test_ext_autosummary_imports.py | 49
-rw-r--r--  tests/test_extensions/test_ext_coverage.py | 40
-rw-r--r--  tests/test_extensions/test_ext_graphviz.py | 2
-rw-r--r--  tests/test_extensions/test_ext_imgconverter.py | 4
-rw-r--r--  tests/test_extensions/test_ext_imgmockconverter.py | 2
-rw-r--r--  tests/test_extensions/test_ext_inheritance_diagram.py | 2
-rw-r--r--  tests/test_extensions/test_ext_intersphinx.py | 33
-rw-r--r--  tests/test_extensions/test_ext_math.py | 22
-rw-r--r--  tests/test_extensions/test_ext_todo.py | 2
-rw-r--r--  tests/test_extensions/test_ext_viewcode.py | 4
-rw-r--r--  tests/test_intl/test_catalogs.py | 2
-rw-r--r--  tests/test_intl/test_intl.py | 61
-rw-r--r--  tests/test_markup/test_markup.py | 18
-rw-r--r--  tests/test_markup/test_smartquotes.py | 6
-rw-r--r--  tests/test_search.py | 60
-rw-r--r--  tests/test_theming/test_theming.py | 7
-rw-r--r--  tests/test_transforms/test_transforms_post_transforms.py | 6
-rw-r--r--  tests/test_transforms/test_transforms_post_transforms_code.py | 2
-rw-r--r--  tests/test_transforms/test_transforms_post_transforms_images.py | 46
-rw-r--r--  tests/test_util/intersphinx_data.py | 12
-rw-r--r--  tests/test_util/test_util_docutils_sphinx_directive.py | 139
-rw-r--r--  tests/test_util/test_util_fileutil.py | 43
-rw-r--r--  tests/test_util/test_util_i18n.py | 10
-rw-r--r--  tests/test_util/test_util_inspect.py | 6
-rw-r--r--  tests/test_util/test_util_inventory.py | 19
-rw-r--r--  tests/test_util/typing_test_data.py | 6
134 files changed, 2156 insertions, 510 deletions
diff --git a/tests/js/fixtures/cpp/searchindex.js b/tests/js/fixtures/cpp/searchindex.js
new file mode 100644
index 0000000..46f4824
--- /dev/null
+++ b/tests/js/fixtures/cpp/searchindex.js
@@ -0,0 +1 @@
+Search.setIndex({"alltitles": {}, "docnames": ["index"], "envversion": {"sphinx": 62, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2}, "filenames": ["index.rst"], "indexentries": {"sphinx (c++ class)": [[0, "_CPPv46Sphinx", false]]}, "objects": {"": [[0, 0, 1, "_CPPv46Sphinx", "Sphinx"]]}, "objnames": {"0": ["cpp", "class", "C++ class"]}, "objtypes": {"0": "cpp:class"}, "terms": {"The": 0, "becaus": 0, "c": 0, "can": 0, "cardin": 0, "challeng": 0, "charact": 0, "class": 0, "descript": 0, "drop": 0, "engin": 0, "fixtur": 0, "frequent": 0, "gener": 0, "i": 0, "index": 0, "inflat": 0, "mathemat": 0, "occur": 0, "often": 0, "project": 0, "punctuat": 0, "queri": 0, "relat": 0, "sampl": 0, "search": 0, "size": 0, "sphinx": 0, "term": 0, "thei": 0, "thi": 0, "token": 0, "us": 0, "web": 0, "would": 0}, "titles": ["&lt;no title&gt;"], "titleterms": {}}) \ No newline at end of file
diff --git a/tests/js/fixtures/multiterm/searchindex.js b/tests/js/fixtures/multiterm/searchindex.js
new file mode 100644
index 0000000..a868eb6
--- /dev/null
+++ b/tests/js/fixtures/multiterm/searchindex.js
@@ -0,0 +1 @@
+Search.setIndex({"alltitles": {"Main Page": [[0, null]]}, "docnames": ["index"], "envversion": {"sphinx": 62, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2}, "filenames": ["index.rst"], "indexentries": {}, "objects": {}, "objnames": {}, "objtypes": {}, "terms": {"At": 0, "adjac": 0, "all": 0, "an": 0, "appear": 0, "applic": 0, "ar": 0, "built": 0, "can": 0, "check": 0, "contain": 0, "do": 0, "document": 0, "doesn": 0, "each": 0, "fixtur": 0, "format": 0, "function": 0, "futur": 0, "html": 0, "i": 0, "includ": 0, "match": 0, "messag": 0, "multipl": 0, "multiterm": 0, "order": 0, "other": 0, "output": 0, "perform": 0, "perhap": 0, "phrase": 0, "project": 0, "queri": 0, "requir": 0, "same": 0, "search": 0, "successfulli": 0, "support": 0, "t": 0, "term": 0, "test": 0, "thi": 0, "time": 0, "us": 0, "when": 0, "write": 0}, "titles": ["Main Page"], "titleterms": {"main": 0, "page": 0}}) \ No newline at end of file
diff --git a/tests/js/fixtures/partial/searchindex.js b/tests/js/fixtures/partial/searchindex.js
new file mode 100644
index 0000000..356386a
--- /dev/null
+++ b/tests/js/fixtures/partial/searchindex.js
@@ -0,0 +1 @@
+Search.setIndex({"alltitles": {"sphinx_utils module": [[0, null]]}, "docnames": ["index"], "envversion": {"sphinx": 62, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2}, "filenames": ["index.rst"], "indexentries": {}, "objects": {}, "objnames": {}, "objtypes": {}, "terms": {"also": 0, "ar": 0, "built": 0, "confirm": 0, "document": 0, "function": 0, "html": 0, "i": 0, "includ": 0, "input": 0, "javascript": 0, "known": 0, "match": 0, "partial": 0, "possibl": 0, "prefix": 0, "project": 0, "provid": 0, "restructuredtext": 0, "sampl": 0, "search": 0, "should": 0, "thi": 0, "titl": 0, "us": 0, "when": 0}, "titles": ["sphinx_utils module"], "titleterms": {"modul": 0, "sphinx_util": 0}}) \ No newline at end of file
diff --git a/tests/js/fixtures/titles/searchindex.js b/tests/js/fixtures/titles/searchindex.js
new file mode 100644
index 0000000..9a229d0
--- /dev/null
+++ b/tests/js/fixtures/titles/searchindex.js
@@ -0,0 +1 @@
+Search.setIndex({"alltitles": {"Main Page": [[0, null]], "Relevance": [[0, "relevance"], [1, null]]}, "docnames": ["index", "relevance"], "envversion": {"sphinx": 62, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2}, "filenames": ["index.rst", "relevance.rst"], "indexentries": {"example (class in relevance)": [[0, "relevance.Example", false]], "module": [[0, "module-relevance", false]], "relevance": [[0, "module-relevance", false]], "relevance (relevance.example attribute)": [[0, "relevance.Example.relevance", false]]}, "objects": {"": [[0, 0, 0, "-", "relevance"]], "relevance": [[0, 1, 1, "", "Example"]], "relevance.Example": [[0, 2, 1, "", "relevance"]]}, "objnames": {"0": ["py", "module", "Python module"], "1": ["py", "class", "Python class"], "2": ["py", "attribute", "Python attribute"]}, "objtypes": {"0": "py:module", "1": "py:class", "2": "py:attribute"}, "terms": {"": [0, 1], "A": 1, "For": 1, "In": [0, 1], "against": 0, "also": 1, "an": 0, "answer": 0, "appear": 1, "ar": 1, "area": 0, "ask": 0, "attribut": 0, "built": 1, "can": [0, 1], "class": 0, "code": [0, 1], "consid": 1, "contain": 0, "context": 0, "corpu": 1, "could": 1, "demonstr": 0, "describ": 1, "detail": 1, "determin": 1, "docstr": 0, "document": [0, 1], "domain": 1, "engin": 0, "exampl": [0, 1], "extract": 0, "find": 0, "found": 0, "from": 0, "function": 1, "ha": 1, "handl": 0, "happen": 1, "head": 0, "help": 0, "highli": 1, "how": 0, "i": [0, 1], "improv": 0, "inform": 0, "intend": 0, "issu": 1, "itself": 1, "knowledg": 0, "languag": 1, "less": 1, "like": [0, 1], "match": 0, "mention": 1, "name": [0, 1], "object": 0, "one": 1, "onli": 1, "other": 0, "page": 1, "part": 1, "particular": 0, "printf": 1, "program": 1, "project": 0, "queri": [0, 1], "question": 0, "re": 0, "rel": 0, "research": 0, "result": 1, "sai": 0, "same": 1, "score": 0, "search": [0, 1], "seem": 0, "softwar": 1, "some": 1, "sphinx": 0, "straightforward": 1, "subject": 0, "subsect": 0, "term": [0, 1], "test": 0, "text": 0, "than": 1, "thei": 0, "them": 0, "thi": 0, "titl": 0, "user": [0, 1], "we": [0, 1], "when": 0, "whether": 1, "within": 0, "would": 1}, "titles": ["Main Page", "Relevance"], "titleterms": {"main": 0, "page": 0, "relev": [0, 1]}}) \ No newline at end of file
diff --git a/tests/js/language_data.js b/tests/js/language_data.js
new file mode 100644
index 0000000..89083d9
--- /dev/null
+++ b/tests/js/language_data.js
@@ -0,0 +1,26 @@
+/*
+ * language_data.js
+ * ~~~~~~~~~~~~~~~~
+ *
+ * This script contains the language-specific data used by searchtools.js,
+ * namely the list of stopwords, stemmer, scorer and splitter.
+ *
+ * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+var stopwords = [];
+
+
+/* Non-minified version is copied as a separate JS file, if available */
+
+/**
+ * Dummy stemmer for languages without stemming rules.
+ */
+var Stemmer = function() {
+ this.stemWord = function(w) {
+ return w;
+ }
+}
+
diff --git a/tests/js/roots/cpp/conf.py b/tests/js/roots/cpp/conf.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/js/roots/cpp/conf.py
diff --git a/tests/js/roots/cpp/index.rst b/tests/js/roots/cpp/index.rst
new file mode 100644
index 0000000..d731343
--- /dev/null
+++ b/tests/js/roots/cpp/index.rst
@@ -0,0 +1,10 @@
+This is a sample C++ project used to generate a search engine index fixture.
+
+.. cpp:class:: public Sphinx
+
+ The description of Sphinx class.
+
+Indexing and querying the term C++ can be challenging, because search-related
+tokenization often drops punctuation and mathematical characters (they occur
+frequently on the web and would inflate the cardinality and size of web search
+indexes).
diff --git a/tests/js/roots/multiterm/conf.py b/tests/js/roots/multiterm/conf.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/js/roots/multiterm/conf.py
diff --git a/tests/js/roots/multiterm/index.rst b/tests/js/roots/multiterm/index.rst
new file mode 100644
index 0000000..495e5ce
--- /dev/null
+++ b/tests/js/roots/multiterm/index.rst
@@ -0,0 +1,13 @@
+Main Page
+=========
+
+This is the main page of the ``multiterm`` test project.
+
+This document is used as a test fixture to check that the search functionality
+included when projects are built into an HTML output format can successfully
+match this document when a search query containing multiple terms is performed.
+
+At the time-of-writing this message, the application doesn't support "phrase
+queries" -- queries that require all of the contained terms to appear adjacent
+to each other and in the same order in the document as in the query; perhaps it
+will do in future?
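(Illustrative sketch, assuming the Karma harness and the loadFixture() helper
defined in tests/js/searchtools.js further down in this diff: a two-term query
against this fixture should surface the document, using only the
Search._parseQuery and Search._performSearch calls that the updated specs
already exercise.)

    it('matches this page for a two-term query', function() {
      eval(loadFixture("multiterm/searchindex.js"));
      // _parseQuery splits the raw query into search terms and excluded terms.
      const searchParameters = Search._parseQuery('main page');
      const results = Search._performSearch(...searchParameters);
      // Each hit is an array whose first element is the document name.
      expect(results.map((hit) => hit[0])).toContain('index');
    });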
diff --git a/tests/js/roots/partial/conf.py b/tests/js/roots/partial/conf.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/js/roots/partial/conf.py
diff --git a/tests/js/roots/partial/index.rst b/tests/js/roots/partial/index.rst
new file mode 100644
index 0000000..6a9561b
--- /dev/null
+++ b/tests/js/roots/partial/index.rst
@@ -0,0 +1,9 @@
+sphinx_utils module
+===================
+
+Partial (also known as "prefix") matches on document titles should be possible
+using the JavaScript search functionality included when HTML documentation
+projects are built.
+
+This document provides a sample reStructuredText input to confirm that partial
+title matching is possible.
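(A minimal sketch under the same assumptions, namely searchtools.js plus the
loadFixture() helper from tests/js loaded in the Karma harness, of the kind of
prefix check this fixture supports: the query 'sphinx' is a prefix of the title
term 'sphinx_utils' and should therefore surface this document.)

    it('finds the page when only a title prefix is queried', function() {
      eval(loadFixture("partial/searchindex.js"));
      const searchParameters = Search._parseQuery('sphinx');
      const results = Search._performSearch(...searchParameters);
      expect(results.map((hit) => hit[0])).toContain('index');
    });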
diff --git a/tests/js/roots/titles/conf.py b/tests/js/roots/titles/conf.py
new file mode 100644
index 0000000..e5f6bb9
--- /dev/null
+++ b/tests/js/roots/titles/conf.py
@@ -0,0 +1,6 @@
+import os
+import sys
+
+sys.path.insert(0, os.path.abspath('.'))
+
+extensions = ['sphinx.ext.autodoc']
diff --git a/tests/js/roots/titles/index.rst b/tests/js/roots/titles/index.rst
new file mode 100644
index 0000000..464cd95
--- /dev/null
+++ b/tests/js/roots/titles/index.rst
@@ -0,0 +1,20 @@
+Main Page
+=========
+
+This is the main page of the ``titles`` test project.
+
+In particular, this test project is intended to demonstrate how Sphinx
+can handle scoring of query matches against document titles and subsection
+heading titles relative to other document matches such as terms found within
+document text and object names extracted from code.
+
+Relevance
+---------
+
+In the context of search engines, we can say that a document is **relevant**
+to a user's query when it contains information that seems likely to help them
+find an answer to a question they're asking, or to improve their knowledge of
+the subject area they're researching.
+
+.. automodule:: relevance
+ :members:
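(A hedged sketch, again assuming the tests/js Karma harness: querying this
fixture for 'relevance' should return hits from both documents, the py:module
object documented on this page as well as the standalone Relevance page, which
is what the ranking specs added to searchtools.js below rely on.)

    it('returns object and page matches for "relevance"', function() {
      eval(loadFixture("titles/searchindex.js"));
      const searchParameters = Search._parseQuery('relevance');
      const results = Search._performSearch(...searchParameters);
      const docnames = results.map((hit) => hit[0]);
      expect(docnames).toContain('index');      // py:module match on the main page
      expect(docnames).toContain('relevance');  // the Relevance document itself
    });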
diff --git a/tests/js/roots/titles/relevance.py b/tests/js/roots/titles/relevance.py
new file mode 100644
index 0000000..c4d0eec
--- /dev/null
+++ b/tests/js/roots/titles/relevance.py
@@ -0,0 +1,7 @@
+class Example:
+ """Example class"""
+ num_attribute = 5
+ text_attribute = "string"
+
+ relevance = "testing"
+ """attribute docstring"""
diff --git a/tests/js/roots/titles/relevance.rst b/tests/js/roots/titles/relevance.rst
new file mode 100644
index 0000000..18f494f
--- /dev/null
+++ b/tests/js/roots/titles/relevance.rst
@@ -0,0 +1,13 @@
+Relevance
+=========
+
+In some domains, it can be straightforward to determine whether a search result
+is relevant to the user's query.
+
+For example, if we are in a software programming language domain, and a user
+has issued a query for the term ``printf``, then we could consider a document
+in the corpus that describes a built-in language function with the same name
+as (highly) relevant. A document that only happens to mention the ``printf``
+function name as part of some example code that appears on the page would
+also be relevant, but likely less relevant than the one that describes the
+function itself in detail.
diff --git a/tests/js/searchtools.js b/tests/js/searchtools.js
index 4f9984d..ebf37e5 100644
--- a/tests/js/searchtools.js
+++ b/tests/js/searchtools.js
@@ -1,20 +1,38 @@
describe('Basic html theme search', function() {
+ function loadFixture(name) {
+ req = new XMLHttpRequest();
+ req.open("GET", `base/tests/js/fixtures/${name}`, false);
+ req.send(null);
+ return req.responseText;
+ }
+
+ function checkRanking(expectedRanking, results) {
+ let [nextExpected, ...remainingItems] = expectedRanking;
+
+ for (result of results.reverse()) {
+ if (!nextExpected) break;
+
+ let [expectedPage, expectedTitle, expectedTarget] = nextExpected;
+ let [page, title, target] = result;
+
+ if (page == expectedPage && title == expectedTitle && target == expectedTarget) {
+ [nextExpected, ...remainingItems] = remainingItems;
+ }
+ }
+
+ expect(remainingItems.length).toEqual(0);
+ expect(nextExpected).toEqual(undefined);
+ }
+
describe('terms search', function() {
it('should find "C++" when in index', function() {
- index = {
- docnames:["index"],
- filenames:["index.rst"],
- terms:{'c++':0},
- titles:["&lt;no title&gt;"],
- titleterms:{}
- }
- Search.setIndex(index);
- searchterms = ['c++'];
- excluded = [];
- terms = index.terms;
- titleterms = index.titleterms;
+ eval(loadFixture("cpp/searchindex.js"));
+
+ [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('C++');
+ terms = Search._index.terms;
+ titleterms = Search._index.titleterms;
hits = [[
"index",
@@ -28,22 +46,11 @@ describe('Basic html theme search', function() {
});
it('should be able to search for multiple terms', function() {
- index = {
- alltitles: {
- 'Main Page': [[0, 'main-page']],
- },
- docnames:["index"],
- filenames:["index.rst"],
- terms:{main:0, page:0},
- titles:["Main Page"],
- titleterms:{ main:0, page:0 }
- }
- Search.setIndex(index);
+ eval(loadFixture("multiterm/searchindex.js"));
- searchterms = ['main', 'page'];
- excluded = [];
- terms = index.terms;
- titleterms = index.titleterms;
+ [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('main page');
+ terms = Search._index.terms;
+ titleterms = Search._index.titleterms;
hits = [[
'index',
'Main Page',
@@ -55,18 +62,11 @@ describe('Basic html theme search', function() {
});
it('should partially-match "sphinx" when in title index', function() {
- index = {
- docnames:["index"],
- filenames:["index.rst"],
- terms:{'useful': 0, 'utilities': 0},
- titles:["sphinx_utils module"],
- titleterms:{'sphinx_utils': 0}
- }
- Search.setIndex(index);
- searchterms = ['sphinx'];
- excluded = [];
- terms = index.terms;
- titleterms = index.titleterms;
+ eval(loadFixture("partial/searchindex.js"));
+
+ [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('sphinx');
+ terms = Search._index.terms;
+ titleterms = Search._index.titleterms;
hits = [[
"index",
@@ -81,6 +81,88 @@ describe('Basic html theme search', function() {
});
+ describe('aggregation of search results', function() {
+
+ it('should combine document title and document term matches', function() {
+ eval(loadFixture("multiterm/searchindex.js"));
+
+ searchParameters = Search._parseQuery('main page');
+
+ hits = [
+ [
+ 'index',
+ 'Main Page',
+ '',
+ null,
+ 16,
+ 'index.rst'
+ ]
+ ];
+ expect(Search._performSearch(...searchParameters)).toEqual(hits);
+ });
+
+ });
+
+ describe('search result ranking', function() {
+
+ /*
+ * These tests should not proscribe precise expected ordering of search
+ * results; instead each test case should describe a single relevance rule
+ * that helps users to locate relevant information efficiently.
+ *
+ * If you think that one of the rules seems to be poorly-defined or is
+ * limiting the potential for search algorithm improvements, please check
+ * for existing discussion/bugreports related to it on GitHub[1] before
+ * creating one yourself. Suggestions for possible improvements are also
+ * welcome.
+ *
+ * [1] - https://github.com/sphinx-doc/sphinx.git/
+ */
+
+ it('should score a code module match above a page-title match', function() {
+ eval(loadFixture("titles/searchindex.js"));
+
+ expectedRanking = [
+ ['index', 'relevance', '#module-relevance'], /* py:module documentation */
+ ['relevance', 'Relevance', ''], /* main title */
+ ];
+
+ searchParameters = Search._parseQuery('relevance');
+ results = Search._performSearch(...searchParameters);
+
+ checkRanking(expectedRanking, results);
+ });
+
+ it('should score a main-title match above an object member match', function() {
+ eval(loadFixture("titles/searchindex.js"));
+
+ expectedRanking = [
+ ['relevance', 'Relevance', ''], /* main title */
+ ['index', 'relevance.Example.relevance', '#relevance.Example.relevance'], /* py:class attribute */
+ ];
+
+ searchParameters = Search._parseQuery('relevance');
+ results = Search._performSearch(...searchParameters);
+
+ checkRanking(expectedRanking, results);
+ });
+
+ it('should score a main-title match above a subheading-title match', function() {
+ eval(loadFixture("titles/searchindex.js"));
+
+ expectedRanking = [
+ ['relevance', 'Relevance', ''], /* main title */
+ ['index', 'Main Page > Relevance', '#relevance'], /* subsection heading title */
+ ];
+
+ searchParameters = Search._parseQuery('relevance');
+ results = Search._performSearch(...searchParameters);
+
+ checkRanking(expectedRanking, results);
+ });
+
+ });
+
});
describe("htmlToText", function() {
@@ -100,15 +182,15 @@ describe("htmlToText", function() {
</style>
<!-- main content -->
<section id="getting-started">
- <h1>Getting Started</h1>
+ <h1>Getting Started <a class="headerlink" href="#getting-started" title="Link to this heading">¶</a></h1>
<p>Some text</p>
</section>
<section id="other-section">
- <h1>Other Section</h1>
+ <h1>Other Section <a class="headerlink" href="#other-section" title="Link to this heading">¶</a></h1>
<p>Other text</p>
</section>
<section id="yet-another-section">
- <h1>Yet Another Section</h1>
+ <h1>Yet Another Section <a class="headerlink" href="#yet-another-section" title="Link to this heading">¶</a></h1>
<p>More text</p>
</section>
</div>
diff --git a/tests/roots/test-add_source_parser-conflicts-with-users-setting/conf.py b/tests/roots/test-add_source_parser-conflicts-with-users-setting/conf.py
index 3ad5491..5e57901 100644
--- a/tests/roots/test-add_source_parser-conflicts-with-users-setting/conf.py
+++ b/tests/roots/test-add_source_parser-conflicts-with-users-setting/conf.py
@@ -11,7 +11,10 @@ class DummyTestParser(Parser):
extensions = ['source_parser']
-source_suffix = ['.rst', '.test']
+source_suffix = {
+ '.rst': 'restructuredtext',
+ '.test': 'restructuredtext',
+}
source_parsers = {
'.test': DummyTestParser
}
diff --git a/tests/roots/test-add_source_parser/conf.py b/tests/roots/test-add_source_parser/conf.py
index 2acd4d2..ef85560 100644
--- a/tests/roots/test-add_source_parser/conf.py
+++ b/tests/roots/test-add_source_parser/conf.py
@@ -5,4 +5,3 @@ sys.path.insert(0, os.path.abspath('.'))
extensions = ['source_parser']
-source_suffix = ['.rst']
diff --git a/tests/roots/test-autosummary/conf.py b/tests/roots/test-autosummary/conf.py
index 46cf4fa..f459017 100644
--- a/tests/roots/test-autosummary/conf.py
+++ b/tests/roots/test-autosummary/conf.py
@@ -5,8 +5,6 @@ sys.path.insert(0, os.path.abspath('.'))
extensions = ['sphinx.ext.autosummary']
-# The suffix of source filenames.
-source_suffix = '.rst'
autosummary_generate = True
exclude_patterns = ['_build']
diff --git a/tests/roots/test-build-text/conf.py b/tests/roots/test-build-text/conf.py
index fd9eefb..b0fdaf8 100644
--- a/tests/roots/test-build-text/conf.py
+++ b/tests/roots/test-build-text/conf.py
@@ -1,2 +1,4 @@
-source_suffix = '.txt'
+source_suffix = {
+ '.txt': 'restructuredtext'
+}
exclude_patterns = ['_build']
diff --git a/tests/roots/test-domain-py-python_maximum_signature_line_length/index.rst b/tests/roots/test-domain-py-python_maximum_signature_line_length/index.rst
index 75e4683..9715500 100644
--- a/tests/roots/test-domain-py-python_maximum_signature_line_length/index.rst
+++ b/tests/roots/test-domain-py-python_maximum_signature_line_length/index.rst
@@ -4,3 +4,16 @@ domain-py-maximum_signature_line_length
.. py:function:: hello(name: str) -> str
.. py:function:: foo([a, [b, ]]c, d[, e, f])
+
+.. py:function:: generic_arg[T]
+
+.. py:function:: generic_foo[T]()
+
+.. py:function:: generic_bar[T](x: list[T])
+
+.. py:function:: generic_ret[R]() -> R
+
+.. py:class:: MyGenericClass[X]
+
+.. py:class:: MyList[T](list[T])
+
diff --git a/tests/roots/test-domain-py/index.rst b/tests/roots/test-domain-py/index.rst
index b24bbea..71e45f7 100644
--- a/tests/roots/test-domain-py/index.rst
+++ b/tests/roots/test-domain-py/index.rst
@@ -8,3 +8,4 @@ test-domain-py
module_option
abbr
canonical
+ type_alias
diff --git a/tests/roots/test-domain-py/module.rst b/tests/roots/test-domain-py/module.rst
index 70098f6..307e786 100644
--- a/tests/roots/test-domain-py/module.rst
+++ b/tests/roots/test-domain-py/module.rst
@@ -64,3 +64,6 @@ module
.. py:data:: test2
:type: typing.Literal[-2]
+
+.. py:type:: MyType1
+ :canonical: list[int | str]
diff --git a/tests/roots/test-domain-py/roles.rst b/tests/roots/test-domain-py/roles.rst
index 6bff2d2..d3492ce 100644
--- a/tests/roots/test-domain-py/roles.rst
+++ b/tests/roots/test-domain-py/roles.rst
@@ -5,14 +5,19 @@ roles
.. py:method:: top_level
+.. py:type:: TopLevelType
+
* :py:class:`TopLevel`
* :py:meth:`top_level`
+* :py:type:`TopLevelType`
.. py:class:: NestedParentA
* Link to :py:meth:`child_1`
+ .. py:type:: NestedTypeA
+
.. py:method:: child_1()
* Link to :py:meth:`NestedChildA.subchild_2`
@@ -46,3 +51,4 @@ roles
* Link to :py:class:`NestedParentB`
* :py:class:`NestedParentA.NestedChildA`
+* :py:type:`NestedParentA.NestedTypeA`
diff --git a/tests/roots/test-domain-py/type_alias.rst b/tests/roots/test-domain-py/type_alias.rst
new file mode 100644
index 0000000..6a3df44
--- /dev/null
+++ b/tests/roots/test-domain-py/type_alias.rst
@@ -0,0 +1,15 @@
+Type Alias
+==========
+
+.. py:module:: module_two
+
+ .. py:class:: SomeClass
+
+:py:type:`.MyAlias`
+:any:`MyAlias`
+:any:`module_one.MyAlias`
+
+.. py:module:: module_one
+
+ .. py:type:: MyAlias
+ :canonical: list[int | module_two.SomeClass]
diff --git a/tests/roots/test-ext-autodoc/conf.py b/tests/roots/test-ext-autodoc/conf.py
index 979a709..9d1cdc7 100644
--- a/tests/roots/test-ext-autodoc/conf.py
+++ b/tests/roots/test-ext-autodoc/conf.py
@@ -5,9 +5,6 @@ sys.path.insert(0, os.path.abspath('.'))
extensions = ['sphinx.ext.autodoc']
-# The suffix of source filenames.
-source_suffix = '.rst'
-
autodoc_mock_imports = [
'dummy'
]
diff --git a/tests/roots/test-ext-autodoc/target/annotated.py b/tests/roots/test-ext-autodoc/target/annotated.py
index 5b87518..7adc3e0 100644
--- a/tests/roots/test-ext-autodoc/target/annotated.py
+++ b/tests/roots/test-ext-autodoc/target/annotated.py
@@ -1,8 +1,42 @@
-from __future__ import annotations
+# from __future__ import annotations
+import dataclasses
+import types
from typing import Annotated
+@dataclasses.dataclass(frozen=True)
+class FuncValidator:
+ func: types.FunctionType
+
+
+@dataclasses.dataclass(frozen=True)
+class MaxLen:
+ max_length: int
+ whitelisted_words: list[str]
+
+
+def validate(value: str) -> str:
+ return value
+
+
+#: Type alias for a validated string.
+ValidatedString = Annotated[str, FuncValidator(validate)]
+
+
def hello(name: Annotated[str, "attribute"]) -> None:
"""docstring"""
pass
+
+
+class AnnotatedAttributes:
+ """docstring"""
+
+ #: Docstring about the ``name`` attribute.
+ name: Annotated[str, "attribute"]
+
+ #: Docstring about the ``max_len`` attribute.
+ max_len: list[Annotated[str, MaxLen(10, ['word_one', 'word_two'])]]
+
+ #: Docstring about the ``validated`` attribute.
+ validated: ValidatedString
diff --git a/tests/roots/test-ext-autosummary-import_cycle/conf.py b/tests/roots/test-ext-autosummary-import_cycle/conf.py
new file mode 100644
index 0000000..5e889f9
--- /dev/null
+++ b/tests/roots/test-ext-autosummary-import_cycle/conf.py
@@ -0,0 +1,7 @@
+import os
+import sys
+
+sys.path.insert(0, os.path.abspath('.'))
+
+extensions = ['sphinx.ext.autosummary']
+autosummary_generate = False
diff --git a/tests/roots/test-ext-autosummary-import_cycle/index.rst b/tests/roots/test-ext-autosummary-import_cycle/index.rst
new file mode 100644
index 0000000..14e7266
--- /dev/null
+++ b/tests/roots/test-ext-autosummary-import_cycle/index.rst
@@ -0,0 +1,6 @@
+.. automodule:: spam.eggs
+ :members:
+
+ .. autosummary::
+
+ spam.eggs.Ham
diff --git a/tests/roots/test-ext-autosummary-import_cycle/spam/__init__.py b/tests/roots/test-ext-autosummary-import_cycle/spam/__init__.py
new file mode 100644
index 0000000..e94cf4b
--- /dev/null
+++ b/tests/roots/test-ext-autosummary-import_cycle/spam/__init__.py
@@ -0,0 +1 @@
+"""``spam`` module docstring."""
diff --git a/tests/roots/test-ext-autosummary-import_cycle/spam/eggs.py b/tests/roots/test-ext-autosummary-import_cycle/spam/eggs.py
new file mode 100644
index 0000000..12122e8
--- /dev/null
+++ b/tests/roots/test-ext-autosummary-import_cycle/spam/eggs.py
@@ -0,0 +1,10 @@
+"""``spam.eggs`` module docstring."""
+
+import spam # Required for test.
+
+
+class Ham:
+ """``spam.eggs.Ham`` class docstring."""
+ a = 1
+ b = 2
+ c = 3
diff --git a/tests/roots/test-ext-autosummary-module_prefix/conf.py b/tests/roots/test-ext-autosummary-module_prefix/conf.py
new file mode 100644
index 0000000..1065b91
--- /dev/null
+++ b/tests/roots/test-ext-autosummary-module_prefix/conf.py
@@ -0,0 +1,8 @@
+import os
+import sys
+
+sys.path.insert(0, os.path.abspath('.'))
+
+extensions = [
+ 'sphinx.ext.autosummary',
+]
diff --git a/tests/roots/test-ext-autosummary-module_prefix/index.rst b/tests/roots/test-ext-autosummary-module_prefix/index.rst
new file mode 100644
index 0000000..fe0b13c
--- /dev/null
+++ b/tests/roots/test-ext-autosummary-module_prefix/index.rst
@@ -0,0 +1,5 @@
+.. autosummary::
+ :toctree: docs/pkg
+ :recursive:
+
+ pkg
diff --git a/tests/roots/test-ext-autosummary-module_prefix/pkg/__init__.py b/tests/roots/test-ext-autosummary-module_prefix/pkg/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/roots/test-ext-autosummary-module_prefix/pkg/__init__.py
diff --git a/tests/roots/test-ext-autosummary-module_prefix/pkg/mod0/__init__.py b/tests/roots/test-ext-autosummary-module_prefix/pkg/mod0/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/roots/test-ext-autosummary-module_prefix/pkg/mod0/__init__.py
diff --git a/tests/roots/test-ext-autosummary-module_prefix/pkg/mod1/__init__.py b/tests/roots/test-ext-autosummary-module_prefix/pkg/mod1/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/roots/test-ext-autosummary-module_prefix/pkg/mod1/__init__.py
diff --git a/tests/roots/test-ext-autosummary/conf.py b/tests/roots/test-ext-autosummary/conf.py
index 55c769c..1c0d022 100644
--- a/tests/roots/test-ext-autosummary/conf.py
+++ b/tests/roots/test-ext-autosummary/conf.py
@@ -5,6 +5,3 @@ sys.path.insert(0, os.path.abspath('.'))
extensions = ['sphinx.ext.autosummary']
autosummary_generate = True
-
-# The suffix of source filenames.
-source_suffix = '.rst'
diff --git a/tests/roots/test-ext-coverage/conf.py b/tests/roots/test-ext-coverage/conf.py
index d3ec6e8..70fd03e 100644
--- a/tests/roots/test-ext-coverage/conf.py
+++ b/tests/roots/test-ext-coverage/conf.py
@@ -5,8 +5,11 @@ sys.path.insert(0, os.path.abspath('.'))
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage']
+coverage_modules = [
+ 'grog',
+]
coverage_ignore_pyobjects = [
- r'^coverage_ignored(\..*)?$',
+ r'^grog\.coverage_ignored(\..*)?$',
r'\.Ignored$',
r'\.Documented\.ignored\d$',
]
diff --git a/tests/roots/test-ext-coverage/grog/__init__.py b/tests/roots/test-ext-coverage/grog/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/roots/test-ext-coverage/grog/__init__.py
diff --git a/tests/roots/test-ext-coverage/coverage_ignored.py b/tests/roots/test-ext-coverage/grog/coverage_ignored.py
index b762955..b762955 100644
--- a/tests/roots/test-ext-coverage/coverage_ignored.py
+++ b/tests/roots/test-ext-coverage/grog/coverage_ignored.py
diff --git a/tests/roots/test-ext-coverage/grog/coverage_missing.py b/tests/roots/test-ext-coverage/grog/coverage_missing.py
new file mode 100644
index 0000000..2fe4433
--- /dev/null
+++ b/tests/roots/test-ext-coverage/grog/coverage_missing.py
@@ -0,0 +1,7 @@
+"""This module is intentionally not documented."""
+
+class Missing:
+ """An undocumented class."""
+
+ def missing_a(self):
+ """An undocumented method."""
diff --git a/tests/roots/test-ext-coverage/coverage_not_ignored.py b/tests/roots/test-ext-coverage/grog/coverage_not_ignored.py
index b762955..b762955 100644
--- a/tests/roots/test-ext-coverage/coverage_not_ignored.py
+++ b/tests/roots/test-ext-coverage/grog/coverage_not_ignored.py
diff --git a/tests/roots/test-ext-coverage/index.rst b/tests/roots/test-ext-coverage/index.rst
index b846898..85dccf9 100644
--- a/tests/roots/test-ext-coverage/index.rst
+++ b/tests/roots/test-ext-coverage/index.rst
@@ -1,6 +1,6 @@
-.. automodule:: coverage_ignored
+.. automodule:: grog.coverage_ignored
:members:
-.. automodule:: coverage_not_ignored
+.. automodule:: grog.coverage_not_ignored
:members:
diff --git a/tests/roots/test-ext-doctest-skipif/conf.py b/tests/roots/test-ext-doctest-skipif/conf.py
index cd8f3eb..ae00e35 100644
--- a/tests/roots/test-ext-doctest-skipif/conf.py
+++ b/tests/roots/test-ext-doctest-skipif/conf.py
@@ -2,7 +2,9 @@ extensions = ['sphinx.ext.doctest']
project = 'test project for the doctest :skipif: directive'
root_doc = 'skipif'
-source_suffix = '.txt'
+source_suffix = {
+ '.txt': 'restructuredtext'
+}
exclude_patterns = ['_build']
doctest_global_setup = '''
diff --git a/tests/roots/test-ext-doctest/conf.py b/tests/roots/test-ext-doctest/conf.py
index d0e8b10..57fc406 100644
--- a/tests/roots/test-ext-doctest/conf.py
+++ b/tests/roots/test-ext-doctest/conf.py
@@ -2,5 +2,7 @@ extensions = ['sphinx.ext.doctest']
project = 'test project for doctest'
root_doc = 'doctest'
-source_suffix = '.txt'
+source_suffix = {
+ '.txt': 'restructuredtext'
+}
exclude_patterns = ['_build']
diff --git a/tests/roots/test-html_assets/extra/API.html_t b/tests/roots/test-html_assets/extra/API.html.jinja
index 34ecd9d..34ecd9d 100644
--- a/tests/roots/test-html_assets/extra/API.html_t
+++ b/tests/roots/test-html_assets/extra/API.html.jinja
diff --git a/tests/roots/test-html_assets/static/API.html_t b/tests/roots/test-html_assets/static/API.html.jinja
index 34ecd9d..34ecd9d 100644
--- a/tests/roots/test-html_assets/static/API.html_t
+++ b/tests/roots/test-html_assets/static/API.html.jinja
diff --git a/tests/roots/test-images/index.rst b/tests/roots/test-images/index.rst
index 9b9aac1..f6d7160 100644
--- a/tests/roots/test-images/index.rst
+++ b/tests/roots/test-images/index.rst
@@ -27,3 +27,8 @@ test-image
.. non-exist remote image
.. image:: http://localhost:7777/NOT_EXIST.PNG
+
+.. a self-contained image within a data URI
+ This image was generated using ImageMagick 6.9 with the command ``convert -pointsize 32 -font Noto-Sans-Egyptian-Hieroglyphs-Regular caption:$(printf '\U13080') -trim -border 2 -monochrome eoh.png``
+.. image:: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACoAAAAjAQAAAADKt6U+AAAAAmJLR0QAAd2KE6QAAAAHdElNRQfoBQIVBgOBlOMTAAAAEGNhTnYAAAAtAAAAOwAAAAEAAAATst46RgAAAJtJREFUCNdNz70KwkAMAOA8iOhjuGh9HB9BCtoTHHwMH0Mc7KWTmx0dHDpovUk6HCil3sUmATHLR/4IAeJA+LEWPmbEeHJMWbTMZDA0CNFn8x1COFPaIHQ55R7hlZGdIjwj2aovRjJbhPvMLNN+r0g2vB7ByIWbHqqVh3LR3lhZWM0qYV8qjU6+lc4J7ZVx4SjEINBKOSinv/+YL1xvsJE6ztdqAAAADHRFWHRjYXB0aW9uAPCTgoD4hdKUAAAAD3RFWHRjYXB0aW9uOmxpbmVzADGoBz2RAAAAAElFTkSuQmCC
+ :alt: The Eye of Horus in a black font on a white background.
diff --git a/tests/roots/test-inheritance/conf.py b/tests/roots/test-inheritance/conf.py
index 26cadca..9953494 100644
--- a/tests/roots/test-inheritance/conf.py
+++ b/tests/roots/test-inheritance/conf.py
@@ -4,4 +4,3 @@ import sys
sys.path.insert(0, os.path.abspath('.'))
extensions = ['sphinx.ext.inheritance_diagram']
-source_suffix = '.rst'
diff --git a/tests/roots/test-intl/conf.py b/tests/roots/test-intl/conf.py
index 96ac664..09c47bb 100644
--- a/tests/roots/test-intl/conf.py
+++ b/tests/roots/test-intl/conf.py
@@ -1,5 +1,7 @@
project = 'Sphinx intl <Tests>'
-source_suffix = '.txt'
+source_suffix = {
+ '.txt': 'restructuredtext'
+}
keep_warnings = True
templates_path = ['_templates']
html_additional_pages = {'contents': 'contents.html'}
diff --git a/tests/roots/test-intl/glossary_terms_inconsistency.txt b/tests/roots/test-intl/glossary_terms_inconsistency.txt
index 837411b..0de1e7e 100644
--- a/tests/roots/test-intl/glossary_terms_inconsistency.txt
+++ b/tests/roots/test-intl/glossary_terms_inconsistency.txt
@@ -4,3 +4,4 @@ i18n with glossary terms inconsistency
======================================
1. link to :term:`Some term` and :term:`Some other term`.
+2. link to :term:`Some term`.
diff --git a/tests/roots/test-intl/index.txt b/tests/roots/test-intl/index.txt
index 9de15d5..ac68314 100644
--- a/tests/roots/test-intl/index.txt
+++ b/tests/roots/test-intl/index.txt
@@ -31,6 +31,7 @@ CONTENTS
section
translation_progress
topic
+ markup
.. toctree::
:maxdepth: 2
diff --git a/tests/roots/test-intl/markup.txt b/tests/roots/test-intl/markup.txt
new file mode 100644
index 0000000..d167a04
--- /dev/null
+++ b/tests/roots/test-intl/markup.txt
@@ -0,0 +1,6 @@
+i18n with strange markup
+========================
+
+1. title starting with 1.
+-------------------------
+
diff --git a/tests/roots/test-intl/role_xref.txt b/tests/roots/test-intl/role_xref.txt
index 2919b5c..f39e752 100644
--- a/tests/roots/test-intl/role_xref.txt
+++ b/tests/roots/test-intl/role_xref.txt
@@ -7,6 +7,9 @@ i18n role xref
link to :term:`Some term`, :ref:`i18n-role-xref`, :doc:`index`.
+link to :term:`Some term`, :ref:`i18n-role-xref`, :doc:`index`.
+---------------------------------------------------------------
+
.. _same-type-links:
same type links
diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms_inconsistency.po b/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms_inconsistency.po
index ef2bf30..048b81f 100644
--- a/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms_inconsistency.po
+++ b/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms_inconsistency.po
@@ -21,3 +21,6 @@ msgstr "I18N WITH GLOSSARY TERMS INCONSISTENCY"
msgid "link to :term:`Some term` and :term:`Some other term`."
msgstr "LINK TO :term:`SOME NEW TERM`."
+
+msgid "link to :term:`Some term`."
+msgstr "LINK TO :term:`TERM NOT IN GLOSSARY`."
diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/literalblock.po b/tests/roots/test-intl/xx/LC_MESSAGES/literalblock.po
index 8d3e5d8..d320d95 100644
--- a/tests/roots/test-intl/xx/LC_MESSAGES/literalblock.po
+++ b/tests/roots/test-intl/xx/LC_MESSAGES/literalblock.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2012-11-22 08:28+0000\n"
+"POT-Creation-Date: 2024-04-14 15:05+0200\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
@@ -22,6 +22,11 @@ msgstr "I18N WITH LITERAL BLOCK"
msgid "Correct literal block::"
msgstr "CORRECT LITERAL BLOCK::"
+msgid ""
+"this is\n"
+"literal block"
+msgstr "THIS IS\nLITERAL BLOCK"
+
msgid "Missing literal block::"
msgstr "MISSING LITERAL BLOCK::"
@@ -31,6 +36,25 @@ msgstr "THAT'S ALL."
msgid "included raw.txt"
msgstr "INCLUDED RAW.TXT"
+msgid ""
+"===\n"
+"Raw\n"
+"===\n"
+"\n"
+".. raw:: html\n"
+"\n"
+" <iframe src=\"https://sphinx-doc.org\"></iframe>\n"
+"\n"
+msgstr ""
+"===\n"
+"RAW\n"
+"===\n"
+"\n"
+".. raw:: html\n"
+"\n"
+" <iframe src=\"HTTPS://SPHINX-DOC.ORG\"></iframe>\n"
+"\n"
+
msgid "code blocks"
msgstr "CODE-BLOCKS"
@@ -43,9 +67,6 @@ msgstr ""
" 'RESULT'\n"
"end"
-msgid "example of C language"
-msgstr "EXAMPLE OF C LANGUAGE"
-
msgid ""
"#include <stdlib.h>\n"
"int main(int argc, char** argv)\n"
@@ -59,6 +80,9 @@ msgstr ""
" return 0;\n"
"}"
+msgid "example of C language"
+msgstr "EXAMPLE OF C LANGUAGE"
+
msgid ""
"#include <stdio.h>\n"
"int main(int argc, char** argv)\n"
diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/markup.po b/tests/roots/test-intl/xx/LC_MESSAGES/markup.po
new file mode 100644
index 0000000..ad6de9b
--- /dev/null
+++ b/tests/roots/test-intl/xx/LC_MESSAGES/markup.po
@@ -0,0 +1,25 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2018, dev
+# This file is distributed under the same license as the sphinx package.
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2018.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: sphinx 1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2018-05-06 16:44+0900\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.4.0\n"
+
+msgid "i18n with strange markup"
+msgstr "I18N WITH STRANGE MARKUP"
+
+msgid "1. title starting with 1."
+msgstr "1. TITLE STARTING WITH 1."
+
diff --git a/tests/roots/test-latex-figure-in-admonition/conf.py b/tests/roots/test-latex-figure-in-admonition/conf.py
index a45d22e..3d8b7b5 100644
--- a/tests/roots/test-latex-figure-in-admonition/conf.py
+++ b/tests/roots/test-latex-figure-in-admonition/conf.py
@@ -1 +1,3 @@
+extensions = ['sphinx.ext.todo']
+todo_include_todos = True
exclude_patterns = ['_build']
diff --git a/tests/roots/test-latex-figure-in-admonition/index.rst b/tests/roots/test-latex-figure-in-admonition/index.rst
index e3d39d3..c3fcaab 100644
--- a/tests/roots/test-latex-figure-in-admonition/index.rst
+++ b/tests/roots/test-latex-figure-in-admonition/index.rst
@@ -3,7 +3,24 @@ Test Figure in Admonition
.. caution::
- This uses a figure in an admonition.
+ This uses a figure in a caution directive.
.. figure:: img.png
+.. note::
+
+ This uses a figure in a note directive.
+
+ .. figure:: img.png
+
+.. seealso::
+
+ This uses a figure in a seealso directive.
+
+ .. figure:: img.png
+
+.. todo::
+
+ This uses a figure in a todo directive.
+
+ .. figure:: img.png
diff --git a/tests/roots/test-latex-table/_mytemplates/latex/longtable.tex_t b/tests/roots/test-latex-table/_mytemplates/latex/longtable.tex.jinja
index e2cb1db..e2cb1db 100644
--- a/tests/roots/test-latex-table/_mytemplates/latex/longtable.tex_t
+++ b/tests/roots/test-latex-table/_mytemplates/latex/longtable.tex.jinja
diff --git a/tests/roots/test-latex-table/_mytemplates/latex/tabulary.tex_t b/tests/roots/test-latex-table/_mytemplates/latex/tabulary.tex_t
new file mode 100644
index 0000000..7e6d425
--- /dev/null
+++ b/tests/roots/test-latex-table/_mytemplates/latex/tabulary.tex_t
@@ -0,0 +1 @@
+AU REVOIR, KANIGGETS
diff --git a/tests/roots/test-linkcheck-anchors-ignore-for-url/index.rst b/tests/roots/test-linkcheck-anchors-ignore-for-url/index.rst
index df287b4..02969b6 100644
--- a/tests/roots/test-linkcheck-anchors-ignore-for-url/index.rst
+++ b/tests/roots/test-linkcheck-anchors-ignore-for-url/index.rst
@@ -1,5 +1,6 @@
* `Example valid url, no anchor <http://localhost:7777/valid>`_
* `Example valid url, valid anchor <http://localhost:7777/valid#valid-anchor>`_
+* `Example valid url, valid quotable anchor <http://localhost:7777/valid#py:module::urllib.parse>`_
* `Example valid url, invalid anchor <http://localhost:7777/valid#invalid-anchor>`_
* `Example ignored url, no anchor <http://localhost:7777/ignored>`_
* `Example ignored url, invalid anchor <http://localhost:7777/ignored#invalid-anchor>`_
diff --git a/tests/roots/test-markup-rubric/conf.py b/tests/roots/test-markup-rubric/conf.py
index e274bde..eccdbf7 100644
--- a/tests/roots/test-markup-rubric/conf.py
+++ b/tests/roots/test-markup-rubric/conf.py
@@ -1,3 +1,4 @@
latex_documents = [
('index', 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report')
]
+latex_toplevel_sectioning = 'section'
diff --git a/tests/roots/test-markup-rubric/index.rst b/tests/roots/test-markup-rubric/index.rst
index c2ae68a..f91b0f7 100644
--- a/tests/roots/test-markup-rubric/index.rst
+++ b/tests/roots/test-markup-rubric/index.rst
@@ -5,3 +5,35 @@ test-markup-rubric
.. rubric:: This is
a multiline rubric
+
+.. rubric:: A rubric with a class
+ :class: myclass
+
+.. rubric:: A rubric with a heading level 1
+ :heading-level: 1
+ :class: myclass
+
+.. rubric:: A rubric with a heading level 2
+ :heading-level: 2
+ :class: myclass
+
+.. rubric:: A rubric with a heading level 3
+ :heading-level: 3
+ :class: myclass
+
+.. rubric:: A rubric with a heading level 4
+ :heading-level: 4
+ :class: myclass
+
+.. rubric:: A rubric with a heading level 5
+ :heading-level: 5
+ :class: myclass
+
+.. rubric:: A rubric with a heading level 6
+ :heading-level: 6
+ :class: myclass
+
+.. rubric:: A rubric with a heading level 7
+ :heading-level: 7
+ :class: myclass
+
diff --git a/tests/roots/test-root/conf.py b/tests/roots/test-root/conf.py
index a14ffaf..25c723b 100644
--- a/tests/roots/test-root/conf.py
+++ b/tests/roots/test-root/conf.py
@@ -17,7 +17,10 @@ jsmath_path = 'dummy.js'
templates_path = ['_templates']
-source_suffix = ['.txt', '.add', '.foo']
+source_suffix = {
+ '.txt': 'restructuredtext',
+ '.foo': 'foo',
+}
project = 'Sphinx <Tests>'
copyright = '1234-6789, copyright text credits'
@@ -68,7 +71,7 @@ latex_elements = {
shadowrule=1pt,
shadowsep=10pt,
shadowsize=10pt,
- div.topic_border-width=2pt,% alias to shadowrule
+ div.topic_border-width=2pt,% alias to shadowrule
div.topic_padding=6pt,% alias to shadowsep
div.topic_box-shadow=5pt,% overrides/alias shadowsize
%
diff --git a/tests/roots/test-root/images.txt b/tests/roots/test-root/images.txt
index 5a096dc..a07429a 100644
--- a/tests/roots/test-root/images.txt
+++ b/tests/roots/test-root/images.txt
@@ -18,5 +18,13 @@ Sphinx image handling
.. an SVG image (for HTML at least)
.. image:: svgimg.*
+.. an SVG image using width with units
+.. image:: svgimg.*
+ :width: 2cm
+
+.. an SVG image using height with units
+.. image:: svgimg.*
+ :height: 2cm
+
.. an image with more than 1 dot in its file name
.. image:: img.foo.png
diff --git a/tests/roots/test-root/markup.txt b/tests/roots/test-root/markup.txt
index ff677eb..91f4194 100644
--- a/tests/roots/test-root/markup.txt
+++ b/tests/roots/test-root/markup.txt
@@ -230,6 +230,19 @@ Tables with multirow and multicol:
figure in table
+ * - .. warning::
+
+ warning in table
+
+ * - .. seealso::
+
+ figure in a seealso in a table
+
+ .. figure:: img.png
+
+ with a caption
+
+ and a legend
Figures
-------
diff --git a/tests/roots/test-templating/conf.py b/tests/roots/test-templating/conf.py
index e03eaf1..7a2baed 100644
--- a/tests/roots/test-templating/conf.py
+++ b/tests/roots/test-templating/conf.py
@@ -1,5 +1,7 @@
project = 'Sphinx templating <Tests>'
-source_suffix = '.txt'
+source_suffix = {
+ '.txt': 'restructuredtext'
+}
keep_warnings = True
templates_path = ['_templates']
release = version = '2013.120'
diff --git a/tests/roots/test-theming/test_theme/staticfiles/static/legacytmpl.html_t b/tests/roots/test-theming/test_theme/staticfiles/static/legacytmpl.html_t
new file mode 100644
index 0000000..8b505e2
--- /dev/null
+++ b/tests/roots/test-theming/test_theme/staticfiles/static/legacytmpl.html_t
@@ -0,0 +1,2 @@
+<!-- testing legacy _t static templates -->
+<html><project>{{ project | lower | escape }}</project></html>
diff --git a/tests/roots/test-theming/test_theme/staticfiles/static/statictmpl.html_t b/tests/roots/test-theming/test_theme/staticfiles/static/statictmpl.html.jinja
index 4ab292b..4ab292b 100644
--- a/tests/roots/test-theming/test_theme/staticfiles/static/statictmpl.html_t
+++ b/tests/roots/test-theming/test_theme/staticfiles/static/statictmpl.html.jinja
diff --git a/tests/roots/test-toctree-domain-objects/document_scoping.rst b/tests/roots/test-toctree-domain-objects/document_scoping.rst
new file mode 100644
index 0000000..49aba9e
--- /dev/null
+++ b/tests/roots/test-toctree-domain-objects/document_scoping.rst
@@ -0,0 +1,23 @@
+Level 1
+=======
+
+.. py:class:: ClassLevel1a
+ ClassLevel1b
+
+ .. py:method:: f()
+
+.. py:method:: ClassLevel1a.g()
+
+.. py:method:: ClassLevel1b.g()
+
+Level 2
+-------
+
+.. py:class:: ClassLevel2a
+ ClassLevel2b
+
+ .. py:method:: f()
+
+.. py:method:: ClassLevel2a.g()
+
+.. py:method:: ClassLevel2b.g()
diff --git a/tests/roots/test-toctree-domain-objects/index.rst b/tests/roots/test-toctree-domain-objects/index.rst
index 77ee010..5f04172 100644
--- a/tests/roots/test-toctree-domain-objects/index.rst
+++ b/tests/roots/test-toctree-domain-objects/index.rst
@@ -4,3 +4,4 @@
:name: mastertoc
domains
+ document_scoping
diff --git a/tests/roots/test-util-copyasset_overwrite/conf.py b/tests/roots/test-util-copyasset_overwrite/conf.py
new file mode 100644
index 0000000..bb91f31
--- /dev/null
+++ b/tests/roots/test-util-copyasset_overwrite/conf.py
@@ -0,0 +1,7 @@
+import os
+import sys
+sys.path.insert(0, os.path.abspath('.'))
+
+extensions = ['myext']
+html_static_path = ['user_static']
+html_theme = 'basic'
diff --git a/tests/roots/test-util-copyasset_overwrite/index.rst b/tests/roots/test-util-copyasset_overwrite/index.rst
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/roots/test-util-copyasset_overwrite/index.rst
diff --git a/tests/roots/test-util-copyasset_overwrite/myext.py b/tests/roots/test-util-copyasset_overwrite/myext.py
new file mode 100644
index 0000000..544057c
--- /dev/null
+++ b/tests/roots/test-util-copyasset_overwrite/myext.py
@@ -0,0 +1,22 @@
+from pathlib import Path
+
+from sphinx.util.fileutil import copy_asset
+
+
+def _copy_asset_overwrite_hook(app):
+ css = app.outdir / '_static' / 'custom-styles.css'
+ # html_static_path is copied by default
+ assert css.read_text() == '/* html_static_path */\n', 'invalid default text'
+ # warning generated by here
+ copy_asset(
+ Path(__file__).parent.joinpath('myext_static', 'custom-styles.css'),
+ app.outdir / '_static',
+ )
+ # This demonstrates the overwriting
+ assert css.read_text() == '/* extension styles */\n', 'overwriting failed'
+ return []
+
+
+def setup(app):
+ app.connect('html-collect-pages', _copy_asset_overwrite_hook)
+ app.add_css_file('custom-styles.css')
diff --git a/tests/roots/test-util-copyasset_overwrite/myext_static/custom-styles.css b/tests/roots/test-util-copyasset_overwrite/myext_static/custom-styles.css
new file mode 100644
index 0000000..9509354
--- /dev/null
+++ b/tests/roots/test-util-copyasset_overwrite/myext_static/custom-styles.css
@@ -0,0 +1 @@
+/* extension styles */
diff --git a/tests/roots/test-util-copyasset_overwrite/user_static/custom-styles.css b/tests/roots/test-util-copyasset_overwrite/user_static/custom-styles.css
new file mode 100644
index 0000000..1b892b9
--- /dev/null
+++ b/tests/roots/test-util-copyasset_overwrite/user_static/custom-styles.css
@@ -0,0 +1 @@
+/* html_static_path */
diff --git a/tests/roots/test-versioning/conf.py b/tests/roots/test-versioning/conf.py
index 6344cb0..d52d1f2 100644
--- a/tests/roots/test-versioning/conf.py
+++ b/tests/roots/test-versioning/conf.py
@@ -1,3 +1,5 @@
project = 'versioning test root'
-source_suffix = '.txt'
+source_suffix = {
+ '.txt': 'restructuredtext'
+}
exclude_patterns = ['_build']
diff --git a/tests/test_addnodes.py b/tests/test_addnodes.py
index aa99343..b3f77ad 100644
--- a/tests/test_addnodes.py
+++ b/tests/test_addnodes.py
@@ -12,7 +12,7 @@ if TYPE_CHECKING:
from collections.abc import Iterator
-@pytest.fixture()
+@pytest.fixture
def sig_elements() -> Iterator[set[type[addnodes.desc_sig_element]]]:
"""Fixture returning the current ``addnodes.SIG_ELEMENTS`` set."""
original = addnodes.SIG_ELEMENTS.copy() # safe copy of the current nodes
diff --git a/tests/test_application.py b/tests/test_application.py
index 1fc49d6..9326ba5 100644
--- a/tests/test_application.py
+++ b/tests/test_application.py
@@ -96,7 +96,7 @@ def test_add_source_parser(app, status, warning):
# .rst; only in :confval:`source_suffix`
assert '.rst' not in app.registry.get_source_parsers()
- assert app.registry.source_suffix['.rst'] is None
+ assert app.registry.source_suffix['.rst'] == 'restructuredtext'
# .test; configured by API
assert app.registry.source_suffix['.test'] == 'test'
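The updated assertion reflects that a suffix listed only in source_suffix is now registered with an explicit 'restructuredtext' file type instead of None, while '.test' is still registered through the extension API. A hedged sketch of such a registration (the suffix and file-type names are illustrative, and a matching parser would still need to be added with app.add_source_parser):

    def setup(app):
        # associate an extra source suffix with a file type
        app.add_source_suffix('.test', 'test')
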
diff --git a/tests/test_builders/test_build.py b/tests/test_builders/test_build.py
index 3f6d12c..0e649f7 100644
--- a/tests/test_builders/test_build.py
+++ b/tests/test_builders/test_build.py
@@ -21,7 +21,7 @@ def request_session_head(url, **kwargs):
return response
-@pytest.fixture()
+@pytest.fixture
def nonascii_srcdir(request, rootdir, sphinx_test_tempdir):
# Build in a non-ASCII source dir
test_name = '\u65e5\u672c\u8a9e'
diff --git a/tests/test_builders/test_build_dirhtml.py b/tests/test_builders/test_build_dirhtml.py
index dc5ab86..93609e3 100644
--- a/tests/test_builders/test_build_dirhtml.py
+++ b/tests/test_builders/test_build_dirhtml.py
@@ -28,13 +28,13 @@ def test_dirhtml(app, status, warning):
invdata = InventoryFile.load(f, 'path/to', posixpath.join)
assert 'index' in invdata.get('std:doc')
- assert invdata['std:doc']['index'] == ('Python', '', 'path/to/', '-')
+ assert invdata['std:doc']['index'] == ('Project name not set', '', 'path/to/', '-')
assert 'foo/index' in invdata.get('std:doc')
- assert invdata['std:doc']['foo/index'] == ('Python', '', 'path/to/foo/', '-')
+ assert invdata['std:doc']['foo/index'] == ('Project name not set', '', 'path/to/foo/', '-')
assert 'index' in invdata.get('std:label')
- assert invdata['std:label']['index'] == ('Python', '', 'path/to/#index', '-')
+ assert invdata['std:label']['index'] == ('Project name not set', '', 'path/to/#index', '-')
assert 'foo' in invdata.get('std:label')
- assert invdata['std:label']['foo'] == ('Python', '', 'path/to/foo/#foo', 'foo/index')
+ assert invdata['std:label']['foo'] == ('Project name not set', '', 'path/to/foo/#foo', 'foo/index')
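The expected inventory entries switch from 'Python' to 'Project name not set', the placeholder a build gets when conf.py leaves the project option unset; the same default (and 'Author name not set' for author) appears in the epub, HTML and LaTeX expectations later in this diff. Setting the options explicitly avoids the placeholders:

    # conf.py -- explicit values instead of the new defaults
    project = 'My Project'
    author = 'Jane Doe'
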
diff --git a/tests/test_builders/test_build_epub.py b/tests/test_builders/test_build_epub.py
index 6829f22..691ffcc 100644
--- a/tests/test_builders/test_build_epub.py
+++ b/tests/test_builders/test_build_epub.py
@@ -67,7 +67,7 @@ def test_build_epub(app):
# toc.ncx
toc = EPUBElementTree.fromstring((app.outdir / 'toc.ncx').read_text(encoding='utf8'))
- assert toc.find("./ncx:docTitle/ncx:text").text == 'Python'
+ assert toc.find("./ncx:docTitle/ncx:text").text == 'Project name not set'
# toc.ncx / head
meta = list(toc.find("./ncx:head"))
@@ -91,11 +91,11 @@ def test_build_epub(app):
# content.opf / metadata
metadata = opf.find("./idpf:metadata")
assert metadata.find("./dc:language").text == 'en'
- assert metadata.find("./dc:title").text == 'Python'
+ assert metadata.find("./dc:title").text == 'Project name not set'
assert metadata.find("./dc:description").text == 'unknown'
- assert metadata.find("./dc:creator").text == 'unknown'
+ assert metadata.find("./dc:creator").text == 'Author name not set'
assert metadata.find("./dc:contributor").text == 'unknown'
- assert metadata.find("./dc:publisher").text == 'unknown'
+ assert metadata.find("./dc:publisher").text == 'Author name not set'
assert metadata.find("./dc:rights").text is None
assert metadata.find("./idpf:meta[@property='ibooks:version']").text is None
assert metadata.find("./idpf:meta[@property='ibooks:specified-fonts']").text == 'true'
@@ -171,7 +171,7 @@ def test_nested_toc(app):
# toc.ncx
toc = EPUBElementTree.fromstring((app.outdir / 'toc.ncx').read_bytes())
- assert toc.find("./ncx:docTitle/ncx:text").text == 'Python'
+ assert toc.find("./ncx:docTitle/ncx:text").text == 'Project name not set'
# toc.ncx / navPoint
def navinfo(elem):
@@ -409,6 +409,7 @@ def test_copy_images(app, status, warning):
images = {image.name for image in images_dir.rglob('*')}
images.discard('python-logo.png')
assert images == {
+ # 'ba30773957c3fe046897111afd65a80b81cad089.png', # epub: image from data:image/png URI in source
'img.png',
'rimg.png',
'rimg1.png',
diff --git a/tests/test_builders/test_build_gettext.py b/tests/test_builders/test_build_gettext.py
index ddc6d30..dc8f4c9 100644
--- a/tests/test_builders/test_build_gettext.py
+++ b/tests/test_builders/test_build_gettext.py
@@ -16,13 +16,12 @@ if sys.version_info[:2] >= (3, 11):
else:
from sphinx.util.osutil import _chdir as chdir
-_MSGID_PATTERN = re.compile(r'msgid "(.*)"')
+_MSGID_PATTERN = re.compile(r'msgid "((?:\n|.)*?)"\nmsgstr', re.MULTILINE)
-def msgid_getter(msgid):
- if m := _MSGID_PATTERN.search(msgid):
- return m[1]
- return None
+def get_msgids(pot):
+ matches = _MSGID_PATTERN.findall(pot)
+ return [m.replace('"\n"', '') for m in matches[1:]]
def test_Catalog_duplicated_message():
@@ -105,7 +104,7 @@ def test_gettext_index_entries(app):
app.build(filenames=[app.srcdir / 'index_entries.txt'])
pot = (app.outdir / 'index_entries.pot').read_text(encoding='utf8')
- msg_ids = list(filter(None, map(msgid_getter, pot.splitlines())))
+ msg_ids = get_msgids(pot)
assert msg_ids == [
"i18n with index entries",
@@ -134,7 +133,7 @@ def test_gettext_disable_index_entries(app):
app.build(filenames=[app.srcdir / 'index_entries.txt'])
pot = (app.outdir / 'index_entries.pot').read_text(encoding='utf8')
- msg_ids = list(filter(None, map(msgid_getter, pot.splitlines())))
+ msg_ids = get_msgids(pot)
assert msg_ids == [
"i18n with index entries",
@@ -200,7 +199,7 @@ def test_gettext_prolog_epilog_substitution(app):
assert (app.outdir / 'prolog_epilog_substitution.pot').is_file()
pot = (app.outdir / 'prolog_epilog_substitution.pot').read_text(encoding='utf8')
- msg_ids = list(filter(None, map(msgid_getter, pot.splitlines())))
+ msg_ids = get_msgids(pot)
assert msg_ids == [
"i18n with prologue and epilogue substitutions",
@@ -227,9 +226,43 @@ def test_gettext_prolog_epilog_substitution_excluded(app):
assert (app.outdir / 'prolog_epilog_substitution_excluded.pot').is_file()
pot = (app.outdir / 'prolog_epilog_substitution_excluded.pot').read_text(encoding='utf8')
- msg_ids = list(filter(None, map(msgid_getter, pot.splitlines())))
+ msg_ids = get_msgids(pot)
assert msg_ids == [
"i18n without prologue and epilogue substitutions",
"This is content that does not include prologue and epilogue substitutions.",
]
+
+
+@pytest.mark.sphinx(
+ 'gettext', srcdir='gettext',
+ confoverrides={'gettext_compact': False,
+ 'gettext_additional_targets': ['literal-block', 'doctest-block']})
+def test_gettext_literalblock_additional(app):
+ app.build(force_all=True)
+
+ assert (app.outdir / 'literalblock.pot').is_file()
+ pot = (app.outdir / 'literalblock.pot').read_text(encoding='utf8')
+ msg_ids = get_msgids(pot)
+
+ assert msg_ids == [
+ 'i18n with literal block',
+ 'Correct literal block::',
+ 'this is\\nliteral block',
+ 'Missing literal block::',
+ "That's all.",
+ 'included raw.txt',
+ '===\\nRaw\\n===\\n\\n.. raw:: html\\n\\n <iframe src=\\"https://sphinx-doc.org\\"></iframe>\\n\\n',
+ 'code blocks',
+ "def main\\n 'result'\\nend",
+ '#include <stdlib.h>\\nint main(int argc, char** argv)\\n{\\n return 0;\\n}',
+ 'example of C language',
+ '#include <stdio.h>\\nint main(int argc, char** argv)\\n{\\n return 0;\\n}',
+ 'literal-block\\nin list',
+ 'test_code_for_noqa()\\ncontinued()',
+ 'doctest blocks',
+ '>>> import sys # sys importing\\n>>> def main(): # define main '
+ "function\\n... sys.stdout.write('hello') # call write method of "
+ "stdout object\\n>>>\\n>>> if __name__ == '__main__': # if run this py "
+ 'file as python script\\n... main() # call main',
+ ]
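The gettext helper above replaces the per-line msgid_getter with get_msgids: the regex captures an entire msgid entry, including continuation lines, up to the following msgstr; the '"\n"' line-join markers are stripped, and matches[1:] discards the POT header entry. A small self-contained check of the same pattern against a synthetic fragment:

    import re

    _MSGID_PATTERN = re.compile(r'msgid "((?:\n|.)*?)"\nmsgstr', re.MULTILINE)

    # synthetic .pot fragment: a header entry plus one wrapped msgid
    sample_pot = (
        'msgid ""\n'
        'msgstr ""\n'
        '\n'
        'msgid "i18n with "\n'
        '"literal block"\n'
        'msgstr ""\n'
    )

    matches = _MSGID_PATTERN.findall(sample_pot)
    msg_ids = [m.replace('"\n"', '') for m in matches[1:]]  # drop the header
    assert msg_ids == ['i18n with literal block']
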
diff --git a/tests/test_builders/test_build_html.py b/tests/test_builders/test_build_html.py
index 1fa3ba4..8db0790 100644
--- a/tests/test_builders/test_build_html.py
+++ b/tests/test_builders/test_build_html.py
@@ -1,5 +1,6 @@
"""Test the HTML builder and check output against XPath."""
+import contextlib
import os
import posixpath
import re
@@ -8,7 +9,7 @@ import pytest
from sphinx.builders.html import validate_html_extra_path, validate_html_static_path
from sphinx.deprecation import RemovedInSphinx80Warning
-from sphinx.errors import ConfigError
+from sphinx.errors import ConfigError, ThemeError
from sphinx.util.console import strip_colors
from sphinx.util.inventory import InventoryFile
@@ -16,6 +17,31 @@ from tests.test_builders.xpath_data import FIGURE_CAPTION
from tests.test_builders.xpath_util import check_xpath
+def test_html_sidebars_error(make_app, tmp_path):
+ (tmp_path / 'conf.py').touch()
+ (tmp_path / 'index.rst').touch()
+ app = make_app(
+ buildername='html',
+ srcdir=tmp_path,
+ confoverrides={'html_sidebars': {'index': 'searchbox.html'}},
+ )
+
+ # Test that the error is logged
+ warnings = app.warning.getvalue()
+ assert ("ERROR: Values in 'html_sidebars' must be a list of strings. "
+ "At least one pattern has a string value: 'index'. "
+ "Change to `html_sidebars = {'index': ['searchbox.html']}`.") in warnings
+
+ # But that the value is unchanged.
+ # (Remove this bit of the test in Sphinx 8)
+ def _html_context_hook(app, pagename, templatename, context, doctree):
+ assert context["sidebars"] == 'searchbox.html'
+ app.connect('html-page-context', _html_context_hook)
+ with contextlib.suppress(ThemeError):
+ # ignore template rendering issues (ThemeError).
+ app.build()
+
+
def test_html4_error(make_app, tmp_path):
(tmp_path / 'conf.py').write_text('', encoding='utf-8')
with pytest.raises(
@@ -131,24 +157,24 @@ def test_html_inventory(app):
'py-modindex',
'genindex',
'search'}
- assert invdata['std:label']['modindex'] == ('Python',
+ assert invdata['std:label']['modindex'] == ('Project name not set',
'',
'https://www.google.com/py-modindex.html',
'Module Index')
- assert invdata['std:label']['py-modindex'] == ('Python',
+ assert invdata['std:label']['py-modindex'] == ('Project name not set',
'',
'https://www.google.com/py-modindex.html',
'Python Module Index')
- assert invdata['std:label']['genindex'] == ('Python',
+ assert invdata['std:label']['genindex'] == ('Project name not set',
'',
'https://www.google.com/genindex.html',
'Index')
- assert invdata['std:label']['search'] == ('Python',
+ assert invdata['std:label']['search'] == ('Project name not set',
'',
'https://www.google.com/search.html',
'Search Page')
assert set(invdata['std:doc'].keys()) == {'index'}
- assert invdata['std:doc']['index'] == ('Python',
+ assert invdata['std:doc']['index'] == ('Project name not set',
'',
'https://www.google.com/index.html',
'The basic Sphinx documentation for testing')
@@ -222,8 +248,8 @@ def test_html_sidebar(app, status, warning):
app.build(force_all=True)
result = (app.outdir / 'index.html').read_text(encoding='utf8')
assert ('<div class="sphinxsidebar" role="navigation" '
- 'aria-label="main navigation">' in result)
- assert '<h1 class="logo"><a href="#">Python</a></h1>' in result
+ 'aria-label="Main">' in result)
+ assert '<h1 class="logo"><a href="#">Project name not set</a></h1>' in result
assert '<h3>Navigation</h3>' in result
assert '<h3>Related Topics</h3>' in result
assert '<h3 id="searchlabel">Quick search</h3>' in result
@@ -237,7 +263,7 @@ def test_html_sidebar(app, status, warning):
app.build(force_all=True)
result = (app.outdir / 'index.html').read_text(encoding='utf8')
assert ('<div class="sphinxsidebar" role="navigation" '
- 'aria-label="main navigation">' in result)
+ 'aria-label="Main">' in result)
assert '<h1 class="logo"><a href="#">Python</a></h1>' not in result
assert '<h3>Navigation</h3>' not in result
assert '<h3>Related Topics</h3>' in result
@@ -251,7 +277,7 @@ def test_html_sidebar(app, status, warning):
app.build(force_all=True)
result = (app.outdir / 'index.html').read_text(encoding='utf8')
assert ('<div class="sphinxsidebar" role="navigation" '
- 'aria-label="main navigation">' not in result)
+ 'aria-label="Main">' not in result)
assert '<h1 class="logo"><a href="#">Python</a></h1>' not in result
assert '<h3>Navigation</h3>' not in result
assert '<h3>Related Topics</h3>' not in result
@@ -376,3 +402,34 @@ def test_html_remove_sources_before_write_gh_issue_10786(app, warning):
file = os.fsdecode(target)
assert f'WARNING: cannot copy image file {file!r}: {file!s} does not exist' == ws[-1]
+
+
+@pytest.mark.sphinx('html', testroot='domain-py-python_maximum_signature_line_length',
+ confoverrides={'python_maximum_signature_line_length': 1})
+def test_html_pep_695_one_type_per_line(app, cached_etree_parse):
+ app.build()
+ fname = app.outdir / 'index.html'
+ etree = cached_etree_parse(fname)
+
+ class chk:
+ def __init__(self, expect):
+ self.expect = expect
+
+ def __call__(self, nodes):
+ assert len(nodes) == 1, nodes
+ objnode = ''.join(nodes[0].itertext()).replace('\n\n', '')
+ objnode = objnode.rstrip(chr(182)) # remove '¶' symbol
+ objnode = objnode.strip('\n') # remove surrounding new lines
+ assert objnode == self.expect
+
+    # each signature has a dangling ',' at the end of its parameter lists
+ check_xpath(etree, fname, r'.//dt[@id="generic_foo"][1]',
+ chk('generic_foo[\nT,\n]()'))
+ check_xpath(etree, fname, r'.//dt[@id="generic_bar"][1]',
+ chk('generic_bar[\nT,\n](\nx: list[T],\n)'))
+ check_xpath(etree, fname, r'.//dt[@id="generic_ret"][1]',
+ chk('generic_ret[\nR,\n]() → R'))
+ check_xpath(etree, fname, r'.//dt[@id="MyGenericClass"][1]',
+ chk('class MyGenericClass[\nX,\n]'))
+ check_xpath(etree, fname, r'.//dt[@id="MyList"][1]',
+ chk('class MyList[\nT,\n](list[T])'))
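The new test drives python_maximum_signature_line_length down to 1 so that every PEP 695 type parameter and every argument is wrapped onto its own line, which is why the chk helper expects the dangling commas. A conf.py sketch of the option with an illustrative threshold:

    # conf.py -- signatures longer than this many characters are broken up,
    # one parameter (and one type parameter) per line
    python_maximum_signature_line_length = 60
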
diff --git a/tests/test_builders/test_build_html_5_output.py b/tests/test_builders/test_build_html_5_output.py
index ece6f49..388c324 100644
--- a/tests/test_builders/test_build_html_5_output.py
+++ b/tests/test_builders/test_build_html_5_output.py
@@ -3,6 +3,7 @@
import re
import pytest
+from docutils import nodes
from tests.test_builders.xpath_util import check_xpath
@@ -25,6 +26,9 @@ def tail_check(check):
('images.html', ".//img[@src='_images/simg.png']", ''),
('images.html', ".//img[@src='_images/svgimg.svg']", ''),
('images.html', ".//a[@href='_sources/images.txt']", ''),
+ # Check svg options
+ ('images.html', ".//img[@src='_images/svgimg.svg'][@style='width: 2cm;']", ''),
+ ('images.html', ".//img[@src='_images/svgimg.svg'][@style='height: 2cm;']", ''),
('subdir/images.html', ".//img[@src='../_images/img1.png']", ''),
('subdir/images.html', ".//img[@src='../_images/rimg.png']", ''),
@@ -255,6 +259,8 @@ def tail_check(check):
('extensions.html', ".//a[@href='https://python.org/dev/']", "https://python.org/dev/"),
('extensions.html', ".//a[@href='https://bugs.python.org/issue1000']", "issue 1000"),
('extensions.html', ".//a[@href='https://bugs.python.org/issue1042']", "explicit caption"),
+ ('extensions.html', ".//a[@class='extlink-pyurl reference external']", "https://python.org/dev/"),
+ ('extensions.html', ".//a[@class='extlink-issue reference external']", "issue 1000"),
# index entries
('genindex.html', ".//a/strong", "Main"),
@@ -270,7 +276,32 @@ def tail_check(check):
])
@pytest.mark.sphinx('html', tags=['testtag'],
confoverrides={'html_context.hckey_co': 'hcval_co'})
-@pytest.mark.test_params(shared_result='test_build_html_output')
def test_html5_output(app, cached_etree_parse, fname, path, check):
app.build()
check_xpath(cached_etree_parse(app.outdir / fname), fname, path, check)
+
+
+@pytest.mark.sphinx('html', testroot='markup-rubric')
+def test_html5_rubric(app):
+ def insert_invalid_rubric_heading_level(app, doctree, docname):
+ if docname != 'index':
+ return
+ new_node = nodes.rubric('', 'INSERTED RUBRIC')
+ new_node['heading-level'] = 7
+ doctree[0].append(new_node)
+
+ app.connect('doctree-resolved', insert_invalid_rubric_heading_level)
+ app.build()
+
+ warnings = app.warning.getvalue()
+ content = (app.outdir / 'index.html').read_text(encoding='utf8')
+ assert '<p class="rubric">This is a rubric</p>' in content
+ assert '<h2 class="myclass rubric">A rubric with a heading level 2</h2>' in content
+
+ # directive warning
+ assert '"7" unknown' in warnings
+
+ # html writer warning
+ assert 'WARNING: unsupported rubric heading level: 7' in warnings
+ assert '</h7>' not in content
+ assert '<p class="rubric">INSERTED RUBRIC</p>' in content
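The rubric test above injects a docutils rubric node with an unsupported 'heading-level' of 7 to exercise both the directive warning and the HTML writer's fallback to a plain <p class="rubric">. A minimal sketch of building a levelled rubric node programmatically, using a level the test shows is supported:

    from docutils import nodes

    rubric = nodes.rubric('', 'A section-like rubric')
    # level 2 matches the '<h2 class="... rubric">' assertion above;
    # level 7 triggers the warnings checked by the test
    rubric['heading-level'] = 2
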
diff --git a/tests/test_builders/test_build_html_assets.py b/tests/test_builders/test_build_html_assets.py
index fc7a987..e2c7c75 100644
--- a/tests/test_builders/test_build_html_assets.py
+++ b/tests/test_builders/test_build_html_assets.py
@@ -34,7 +34,7 @@ def test_html_assets(app):
# html_extra_path
assert (app.outdir / '.htaccess').exists()
assert not (app.outdir / '.htpasswd').exists()
- assert (app.outdir / 'API.html_t').exists()
+ assert (app.outdir / 'API.html.jinja').exists()
assert (app.outdir / 'css/style.css').exists()
assert (app.outdir / 'rimg.png').exists()
assert not (app.outdir / '_build' / 'index.html').exists()
diff --git a/tests/test_builders/test_build_html_download.py b/tests/test_builders/test_build_html_download.py
index 1201c66..14332d8 100644
--- a/tests/test_builders/test_build_html_download.py
+++ b/tests/test_builders/test_build_html_download.py
@@ -5,7 +5,6 @@ import pytest
@pytest.mark.sphinx('html')
-@pytest.mark.test_params(shared_result='test_build_html_output')
def test_html_download(app):
app.build()
diff --git a/tests/test_builders/test_build_html_image.py b/tests/test_builders/test_build_html_image.py
index 08ed618..860beb6 100644
--- a/tests/test_builders/test_build_html_image.py
+++ b/tests/test_builders/test_build_html_image.py
@@ -29,7 +29,7 @@ def test_html_remote_logo(app, status, warning):
app.build(force_all=True)
result = (app.outdir / 'index.html').read_text(encoding='utf8')
- assert ('<img class="logo" src="https://www.python.org/static/img/python-logo.png" alt="Logo"/>' in result)
+ assert ('<img class="logo" src="https://www.python.org/static/img/python-logo.png" alt="Logo of Project name not set"/>' in result)
assert ('<link rel="icon" href="https://www.python.org/static/favicon.ico"/>' in result)
assert not (app.outdir / 'python-logo.png').exists()
@@ -39,7 +39,7 @@ def test_html_local_logo(app, status, warning):
app.build(force_all=True)
result = (app.outdir / 'index.html').read_text(encoding='utf8')
- assert ('<img class="logo" src="_static/img.png" alt="Logo"/>' in result)
+ assert ('<img class="logo" src="_static/img.png" alt="Logo of Project name not set"/>' in result)
assert (app.outdir / '_static/img.png').exists()
@@ -72,6 +72,7 @@ def test_copy_images(app, status, warning):
images_dir = Path(app.outdir) / '_images'
images = {image.name for image in images_dir.rglob('*')}
assert images == {
+ # 'ba30773957c3fe046897111afd65a80b81cad089.png', # html: image from data:image/png URI in source
'img.png',
'rimg.png',
'rimg1.png',
diff --git a/tests/test_builders/test_build_latex.py b/tests/test_builders/test_build_latex.py
index 0776c74..56505b4 100644
--- a/tests/test_builders/test_build_latex.py
+++ b/tests/test_builders/test_build_latex.py
@@ -41,7 +41,7 @@ def kpsetest(*filenames):
# compile latex document with app.config.latex_engine
-def compile_latex_document(app, filename='python.tex', docclass='manual'):
+def compile_latex_document(app, filename='projectnamenotset.tex', docclass='manual'):
# now, try to run latex over it
try:
with chdir(app.outdir):
@@ -158,21 +158,21 @@ def test_writer(app, status, warning):
assert ('\\begin{wrapfigure}{r}{0pt}\n\\centering\n'
'\\noindent\\sphinxincludegraphics{{rimg}.png}\n'
- '\\caption{figure with align option}\\label{\\detokenize{markup:id9}}'
+ '\\caption{figure with align option}\\label{\\detokenize{markup:id10}}'
'\\end{wrapfigure}\n\n'
'\\mbox{}\\par\\vskip-\\dimexpr\\baselineskip+\\parskip\\relax' in result)
assert ('\\begin{wrapfigure}{r}{0.500\\linewidth}\n\\centering\n'
'\\noindent\\sphinxincludegraphics{{rimg}.png}\n'
'\\caption{figure with align \\& figwidth option}'
- '\\label{\\detokenize{markup:id10}}'
+ '\\label{\\detokenize{markup:id11}}'
'\\end{wrapfigure}\n\n'
'\\mbox{}\\par\\vskip-\\dimexpr\\baselineskip+\\parskip\\relax' in result)
assert ('\\begin{wrapfigure}{r}{3cm}\n\\centering\n'
'\\noindent\\sphinxincludegraphics[width=3cm]{{rimg}.png}\n'
'\\caption{figure with align \\& width option}'
- '\\label{\\detokenize{markup:id11}}'
+ '\\label{\\detokenize{markup:id12}}'
'\\end{wrapfigure}\n\n'
'\\mbox{}\\par\\vskip-\\dimexpr\\baselineskip+\\parskip\\relax' in result)
@@ -255,7 +255,7 @@ def test_latex_basic_howto_ja(app, status, warning):
@pytest.mark.sphinx('latex', testroot='latex-theme')
def test_latex_theme(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
assert r'\def\sphinxdocclass{book}' in result
assert r'\documentclass[a4paper,12pt,english]{sphinxbook}' in result
@@ -266,7 +266,7 @@ def test_latex_theme(app, status, warning):
'pointsize': '9pt'}})
def test_latex_theme_papersize(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
assert r'\def\sphinxdocclass{book}' in result
assert r'\documentclass[b5paper,9pt,english]{sphinxbook}' in result
@@ -277,7 +277,7 @@ def test_latex_theme_papersize(app, status, warning):
'pointsize': '9pt'}})
def test_latex_theme_options(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
assert r'\def\sphinxdocclass{book}' in result
assert r'\documentclass[b5paper,9pt,english]{sphinxbook}' in result
@@ -330,7 +330,7 @@ def test_latex_release(app, status, warning):
confoverrides={'numfig': True})
def test_numref(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -372,7 +372,7 @@ def test_numref(app, status, warning):
'section': 'SECTION-%s'}})
def test_numref_with_prefix1(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -420,7 +420,7 @@ def test_numref_with_prefix1(app, status, warning):
'section': 'SECTION_%s_'}})
def test_numref_with_prefix2(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -460,7 +460,7 @@ def test_numref_with_prefix2(app, status, warning):
confoverrides={'numfig': True, 'language': 'ja'})
def test_numref_with_language_ja(app, status, warning):
app.build()
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -556,7 +556,7 @@ def test_latex_add_latex_package(app, status, warning):
@pytest.mark.sphinx('latex', testroot='latex-babel')
def test_babel_with_no_language_settings(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -581,7 +581,7 @@ def test_babel_with_no_language_settings(app, status, warning):
confoverrides={'language': 'de'})
def test_babel_with_language_de(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -606,7 +606,7 @@ def test_babel_with_language_de(app, status, warning):
confoverrides={'language': 'ru'})
def test_babel_with_language_ru(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -631,7 +631,7 @@ def test_babel_with_language_ru(app, status, warning):
confoverrides={'language': 'tr'})
def test_babel_with_language_tr(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -656,7 +656,7 @@ def test_babel_with_language_tr(app, status, warning):
confoverrides={'language': 'ja'})
def test_babel_with_language_ja(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -680,7 +680,7 @@ def test_babel_with_language_ja(app, status, warning):
confoverrides={'language': 'unknown'})
def test_babel_with_unknown_language(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -707,7 +707,7 @@ def test_babel_with_unknown_language(app, status, warning):
confoverrides={'language': 'de', 'latex_engine': 'lualatex'})
def test_polyglossia_with_language_de(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -733,7 +733,7 @@ def test_polyglossia_with_language_de(app, status, warning):
confoverrides={'language': 'de-1901', 'latex_engine': 'lualatex'})
def test_polyglossia_with_language_de_1901(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -786,7 +786,7 @@ def test_footnote(app, status, warning):
@pytest.mark.sphinx('latex', testroot='footnotes')
def test_reference_in_caption_and_codeblock_in_footnote(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -826,7 +826,7 @@ def test_reference_in_caption_and_codeblock_in_footnote(app, status, warning):
@pytest.mark.sphinx('latex', testroot='footnotes')
def test_footnote_referred_multiple_times(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -848,7 +848,7 @@ def test_footnote_referred_multiple_times(app, status, warning):
confoverrides={'latex_show_urls': 'inline'})
def test_latex_show_urls_is_inline(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -905,7 +905,7 @@ def test_latex_show_urls_is_inline(app, status, warning):
confoverrides={'latex_show_urls': 'footnote'})
def test_latex_show_urls_is_footnote(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -967,7 +967,7 @@ def test_latex_show_urls_is_footnote(app, status, warning):
confoverrides={'latex_show_urls': 'no'})
def test_latex_show_urls_is_no(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -1022,7 +1022,7 @@ def test_latex_show_urls_footnote_and_substitutions(app, status, warning):
@pytest.mark.sphinx('latex', testroot='image-in-section')
def test_image_in_section(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -1045,7 +1045,7 @@ def test_latex_logo_if_not_found(app, status, warning):
@pytest.mark.sphinx('latex', testroot='toctree-maxdepth')
def test_toctree_maxdepth_manual(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -1057,12 +1057,12 @@ def test_toctree_maxdepth_manual(app, status, warning):
@pytest.mark.sphinx(
'latex', testroot='toctree-maxdepth',
confoverrides={'latex_documents': [
- ('index', 'python.tex', 'Sphinx Tests Documentation',
+ ('index', 'projectnamenotset.tex', 'Sphinx Tests Documentation',
'Georg Brandl', 'howto'),
]})
def test_toctree_maxdepth_howto(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -1076,7 +1076,7 @@ def test_toctree_maxdepth_howto(app, status, warning):
confoverrides={'root_doc': 'foo'})
def test_toctree_not_found(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -1090,7 +1090,7 @@ def test_toctree_not_found(app, status, warning):
confoverrides={'root_doc': 'bar'})
def test_toctree_without_maxdepth(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -1103,7 +1103,7 @@ def test_toctree_without_maxdepth(app, status, warning):
confoverrides={'root_doc': 'qux'})
def test_toctree_with_deeper_maxdepth(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -1116,7 +1116,7 @@ def test_toctree_with_deeper_maxdepth(app, status, warning):
confoverrides={'latex_toplevel_sectioning': None})
def test_latex_toplevel_sectioning_is_None(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -1128,7 +1128,7 @@ def test_latex_toplevel_sectioning_is_None(app, status, warning):
confoverrides={'latex_toplevel_sectioning': 'part'})
def test_latex_toplevel_sectioning_is_part(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -1141,12 +1141,12 @@ def test_latex_toplevel_sectioning_is_part(app, status, warning):
'latex', testroot='toctree-maxdepth',
confoverrides={'latex_toplevel_sectioning': 'part',
'latex_documents': [
- ('index', 'python.tex', 'Sphinx Tests Documentation',
+ ('index', 'projectnamenotset.tex', 'Sphinx Tests Documentation',
'Georg Brandl', 'howto'),
]})
def test_latex_toplevel_sectioning_is_part_with_howto(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -1160,7 +1160,7 @@ def test_latex_toplevel_sectioning_is_part_with_howto(app, status, warning):
confoverrides={'latex_toplevel_sectioning': 'chapter'})
def test_latex_toplevel_sectioning_is_chapter(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -1171,12 +1171,12 @@ def test_latex_toplevel_sectioning_is_chapter(app, status, warning):
'latex', testroot='toctree-maxdepth',
confoverrides={'latex_toplevel_sectioning': 'chapter',
'latex_documents': [
- ('index', 'python.tex', 'Sphinx Tests Documentation',
+ ('index', 'projectnamenotset.tex', 'Sphinx Tests Documentation',
'Georg Brandl', 'howto'),
]})
def test_latex_toplevel_sectioning_is_chapter_with_howto(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -1188,7 +1188,7 @@ def test_latex_toplevel_sectioning_is_chapter_with_howto(app, status, warning):
confoverrides={'latex_toplevel_sectioning': 'section'})
def test_latex_toplevel_sectioning_is_section(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
@@ -1199,11 +1199,11 @@ def test_latex_toplevel_sectioning_is_section(app, status, warning):
@pytest.mark.sphinx('latex', testroot='maxlistdepth')
def test_maxlistdepth_at_ten(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
print(status.getvalue())
print(warning.getvalue())
- compile_latex_document(app, 'python.tex')
+ compile_latex_document(app, 'projectnamenotset.tex')
@pytest.mark.sphinx('latex', testroot='latex-table',
@@ -1211,7 +1211,7 @@ def test_maxlistdepth_at_ten(app, status, warning):
@pytest.mark.test_params(shared_result='latex-table')
def test_latex_table_tabulars(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
tables = {}
for chap in re.split(r'\\(?:section|chapter){', result)[1:]:
sectname, content = chap.split('}', 1)
@@ -1282,7 +1282,7 @@ def test_latex_table_tabulars(app, status, warning):
@pytest.mark.test_params(shared_result='latex-table')
def test_latex_table_longtable(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
tables = {}
for chap in re.split(r'\\(?:section|chapter){', result)[1:]:
sectname, content = chap.split('}', 1)
@@ -1343,7 +1343,7 @@ def test_latex_table_longtable(app, status, warning):
@pytest.mark.test_params(shared_result='latex-table')
def test_latex_table_complex_tables(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
tables = {}
for chap in re.split(r'\\(?:section|renewcommand){', result)[1:]:
sectname, content = chap.split('}', 1)
@@ -1373,7 +1373,7 @@ def test_latex_table_complex_tables(app, status, warning):
@pytest.mark.sphinx('latex', testroot='latex-table')
def test_latex_table_with_booktabs_and_colorrows(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
assert r'\PassOptionsToPackage{booktabs}{sphinx}' in result
assert r'\PassOptionsToPackage{colorrows}{sphinx}' in result
# tabularcolumns
@@ -1389,15 +1389,16 @@ def test_latex_table_with_booktabs_and_colorrows(app, status, warning):
confoverrides={'templates_path': ['_mytemplates/latex']})
def test_latex_table_custom_template_caseA(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
assert 'SALUT LES COPAINS' in result
+ assert 'AU REVOIR, KANIGGETS' in result
@pytest.mark.sphinx('latex', testroot='latex-table',
confoverrides={'templates_path': ['_mytemplates']})
def test_latex_table_custom_template_caseB(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
assert 'SALUT LES COPAINS' not in result
@@ -1405,14 +1406,14 @@ def test_latex_table_custom_template_caseB(app, status, warning):
@pytest.mark.test_params(shared_result='latex-table')
def test_latex_table_custom_template_caseC(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
assert 'SALUT LES COPAINS' not in result
@pytest.mark.sphinx('latex', testroot='directives-raw')
def test_latex_raw_directive(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
# standard case
assert 'standalone raw directive (HTML)' not in result
@@ -1429,7 +1430,7 @@ def test_latex_images(app, status, warning):
with http_server(RemoteImageHandler, port=7777):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
# images are copied
assert '\\sphinxincludegraphics{{sphinx}.png}' in result
@@ -1453,7 +1454,7 @@ def test_latex_images(app, status, warning):
def test_latex_index(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
assert ('A \\index{famous@\\spxentry{famous}}famous '
'\\index{equation@\\spxentry{equation}}equation:\n' in result)
assert ('\n\\index{Einstein@\\spxentry{Einstein}}'
@@ -1467,7 +1468,7 @@ def test_latex_index(app, status, warning):
def test_latex_equations(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
expected = (app.srcdir / 'expects' / 'latex-equations.tex').read_text(encoding='utf8').strip()
assert expected in result
@@ -1477,7 +1478,7 @@ def test_latex_equations(app, status, warning):
def test_latex_image_in_parsed_literal(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
assert ('{\\sphinxunactivateextrasandspace \\raisebox{-0.5\\height}'
'{\\sphinxincludegraphics[height=2.00000cm]{{pic}.png}}'
'}AFTER') in result
@@ -1487,7 +1488,7 @@ def test_latex_image_in_parsed_literal(app, status, warning):
def test_latex_nested_enumerated_list(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
assert ('\\sphinxsetlistlabels{\\arabic}{enumi}{enumii}{}{.}%\n'
'\\setcounter{enumi}{4}\n' in result)
assert ('\\sphinxsetlistlabels{\\alph}{enumii}{enumiii}{}{.}%\n'
@@ -1504,7 +1505,7 @@ def test_latex_nested_enumerated_list(app, status, warning):
def test_latex_thebibliography(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
assert ('\\begin{sphinxthebibliography}{AuthorYe}\n'
'\\bibitem[AuthorYear]{index:authoryear}\n\\sphinxAtStartPar\n'
@@ -1517,7 +1518,7 @@ def test_latex_thebibliography(app, status, warning):
def test_latex_glossary(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
assert (r'\sphinxlineitem{ähnlich\index{ähnlich@\spxentry{ähnlich}|spxpagem}'
r'\phantomsection'
r'\label{\detokenize{index:term-ahnlich}}}' in result)
@@ -1541,7 +1542,7 @@ def test_latex_glossary(app, status, warning):
def test_latex_labels(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
# figures
assert (r'\caption{labeled figure}'
@@ -1589,8 +1590,10 @@ def test_latex_labels(app, status, warning):
@pytest.mark.sphinx('latex', testroot='latex-figure-in-admonition')
def test_latex_figure_in_admonition(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
- assert r'\begin{figure}[H]' in result
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
+ assert 'tabulary' not in result
+ for type in ('caution', 'note', 'seealso', 'todo'):
+ assert f'{type} directive.\n\n\\begin{{figure}}[H]' in result
def test_default_latex_documents():
@@ -1619,7 +1622,7 @@ def test_includegraphics_oversized(app, status, warning):
@pytest.mark.sphinx('latex', testroot='index_on_title')
def test_index_on_title(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
assert ('\\chapter{Test for index in top level title}\n'
'\\label{\\detokenize{contents:test-for-index-in-top-level-title}}'
'\\index{index@\\spxentry{index}}\n'
@@ -1630,7 +1633,7 @@ def test_index_on_title(app, status, warning):
confoverrides={'latex_engine': 'pdflatex'})
def test_texescape_for_non_unicode_supported_engine(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
assert 'script small e: e' in result
assert 'double struck italic small i: i' in result
@@ -1642,7 +1645,7 @@ def test_texescape_for_non_unicode_supported_engine(app, status, warning):
confoverrides={'latex_engine': 'xelatex'})
def test_texescape_for_unicode_supported_engine(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(result)
assert 'script small e: e' in result
assert 'double struck italic small i: i' in result
@@ -1667,7 +1670,7 @@ def test_latex_nested_tables(app, status, warning):
@pytest.mark.sphinx('latex', testroot='latex-container')
def test_latex_container(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
assert r'\begin{sphinxuseclass}{classname}' in result
assert r'\end{sphinxuseclass}' in result
@@ -1675,7 +1678,7 @@ def test_latex_container(app, status, warning):
@pytest.mark.sphinx('latex', testroot='reST-code-role')
def test_latex_code_role(app):
app.build()
- content = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ content = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
common_content = (
r'\PYG{k}{def} '
@@ -1710,6 +1713,7 @@ def test_copy_images(app, status, warning):
}
images.discard('sphinx.png')
assert images == {
+ 'ba30773957c3fe046897111afd65a80b81cad089.png', # latex: image from data:image/png URI in source
'img.pdf',
'rimg.png',
'testimäge.png',
@@ -1719,7 +1723,7 @@ def test_copy_images(app, status, warning):
@pytest.mark.sphinx('latex', testroot='latex-labels-before-module')
def test_duplicated_labels_before_module(app, status, warning):
app.build()
- content: str = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ content: str = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
def count_label(name):
text = r'\phantomsection\label{\detokenize{%s}}' % name
@@ -1750,10 +1754,40 @@ def test_duplicated_labels_before_module(app, status, warning):
confoverrides={'python_maximum_signature_line_length': 23})
def test_one_parameter_per_line(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
# TODO: should these asserts check presence or absence of a final \sphinxparamcomma?
# signature of 23 characters is too short to trigger one-param-per-line mark-up
assert ('\\pysiglinewithargsret{\\sphinxbfcode{\\sphinxupquote{hello}}}' in result)
assert ('\\pysigwithonelineperarg{\\sphinxbfcode{\\sphinxupquote{foo}}}' in result)
+
+ # generic_arg[T]
+ assert ('\\pysiglinewithargsretwithtypelist{\\sphinxbfcode{\\sphinxupquote{generic\\_arg}}}'
+ '{\\sphinxtypeparam{\\DUrole{n}{T}}}{}{}' in result)
+
+ # generic_foo[T]()
+ assert ('\\pysiglinewithargsretwithtypelist{\\sphinxbfcode{\\sphinxupquote{generic\\_foo}}}' in result)
+
+ # generic_bar[T](x: list[T])
+ assert ('\\pysigwithonelineperargwithtypelist{\\sphinxbfcode{\\sphinxupquote{generic\\_bar}}}' in result)
+
+ # generic_ret[R]() -> R
+ assert ('\\pysiglinewithargsretwithtypelist{\\sphinxbfcode{\\sphinxupquote{generic\\_ret}}}'
+ '{\\sphinxtypeparam{\\DUrole{n}{R}}}{}{{ $\\rightarrow$ R}}' in result)
+
+ # MyGenericClass[X]
+ assert ('\\pysiglinewithargsretwithtypelist{\\sphinxbfcode{\\sphinxupquote{class\\DUrole{w}{ '
+ '}}}\\sphinxbfcode{\\sphinxupquote{MyGenericClass}}}' in result)
+
+ # MyList[T](list[T])
+ assert ('\\pysiglinewithargsretwithtypelist{\\sphinxbfcode{\\sphinxupquote{class\\DUrole{w}{ '
+ '}}}\\sphinxbfcode{\\sphinxupquote{MyList}}}' in result)
+
+
+@pytest.mark.sphinx('latex', testroot='markup-rubric')
+def test_latex_rubric(app):
+ app.build()
+ content = (app.outdir / 'test.tex').read_text(encoding='utf8')
+ assert r'\subsubsection*{This is a rubric}' in content
+ assert r'\subsection*{A rubric with a heading level 2}' in content
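Nearly every LaTeX assertion in this file now reads projectnamenotset.tex instead of python.tex, because the default target name is derived from the (unset) project value. Pinning the name in conf.py keeps it stable regardless of the project string; a hedged sketch using the same five-tuple shape as the confoverrides above:

    # conf.py -- explicit LaTeX target name instead of the project-derived default
    latex_documents = [
        ('index', 'myproject.tex', 'My Project Documentation',
         'Jane Doe', 'manual'),
    ]
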
diff --git a/tests/test_builders/test_build_linkcheck.py b/tests/test_builders/test_build_linkcheck.py
index c8d8515..0787661 100644
--- a/tests/test_builders/test_build_linkcheck.py
+++ b/tests/test_builders/test_build_linkcheck.py
@@ -11,6 +11,7 @@ import wsgiref.handlers
from base64 import b64encode
from http.server import BaseHTTPRequestHandler
from queue import Queue
+from typing import TYPE_CHECKING
from unittest import mock
import docutils
@@ -20,6 +21,7 @@ from urllib3.poolmanager import PoolManager
import sphinx.util.http_date
from sphinx.builders.linkcheck import (
CheckRequest,
+ CheckResult,
Hyperlink,
HyperlinkAvailabilityCheckWorker,
RateLimit,
@@ -33,6 +35,12 @@ from tests.utils import CERT_FILE, serve_application
ts_re = re.compile(r".*\[(?P<ts>.*)\].*")
+if TYPE_CHECKING:
+ from collections.abc import Callable, Iterable
+ from io import StringIO
+
+ from sphinx.application import Sphinx
+
class DefaultsHandler(BaseHTTPRequestHandler):
protocol_version = "HTTP/1.1"
@@ -101,7 +109,7 @@ class ConnectionMeasurement:
@pytest.mark.sphinx('linkcheck', testroot='linkcheck', freshenv=True)
-def test_defaults(app):
+def test_defaults(app: Sphinx) -> None:
with serve_application(app, DefaultsHandler) as address:
with ConnectionMeasurement() as m:
app.build()
@@ -146,7 +154,7 @@ def test_defaults(app):
'info': '',
}
- def _missing_resource(filename: str, lineno: int):
+ def _missing_resource(filename: str, lineno: int) -> dict[str, str | int]:
return {
'filename': 'links.rst',
'lineno': lineno,
@@ -178,7 +186,7 @@ def test_defaults(app):
@pytest.mark.sphinx(
'linkcheck', testroot='linkcheck', freshenv=True,
confoverrides={'linkcheck_anchors': False})
-def test_check_link_response_only(app):
+def test_check_link_response_only(app: Sphinx) -> None:
with serve_application(app, DefaultsHandler) as address:
app.build()
@@ -192,7 +200,7 @@ def test_check_link_response_only(app):
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-too-many-retries', freshenv=True)
-def test_too_many_retries(app):
+def test_too_many_retries(app: Sphinx) -> None:
with serve_application(app, DefaultsHandler) as address:
app.build()
@@ -221,7 +229,7 @@ def test_too_many_retries(app):
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-raw-node', freshenv=True)
-def test_raw_node(app):
+def test_raw_node(app: Sphinx) -> None:
with serve_application(app, OKHandler) as address:
# write an index file that contains a link back to this webserver's root
# URL. docutils will replace the raw node with the contents retrieved..
@@ -254,7 +262,7 @@ def test_raw_node(app):
@pytest.mark.sphinx(
'linkcheck', testroot='linkcheck-anchors-ignore', freshenv=True,
confoverrides={'linkcheck_anchors_ignore': ["^!", "^top$"]})
-def test_anchors_ignored(app):
+def test_anchors_ignored(app: Sphinx) -> None:
with serve_application(app, OKHandler):
app.build()
@@ -266,6 +274,43 @@ def test_anchors_ignored(app):
class AnchorsIgnoreForUrlHandler(BaseHTTPRequestHandler):
+ protocol_version = 'HTTP/1.1'
+
+ def _chunk_content(self, content: str, *, max_chunk_size: int) -> Iterable[bytes]:
+
+ def _encode_chunk(chunk: bytes) -> Iterable[bytes]:
+ """Encode a bytestring into a format suitable for HTTP chunked-transfer.
+
+ https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Transfer-Encoding
+ """
+ yield f'{len(chunk):X}'.encode('ascii')
+ yield b'\r\n'
+ yield chunk
+ yield b'\r\n'
+
+ buffer = b''
+ for char in content:
+ buffer += char.encode('utf-8')
+ if len(buffer) >= max_chunk_size:
+ chunk, buffer = buffer[:max_chunk_size], buffer[max_chunk_size:]
+ yield from _encode_chunk(chunk)
+
+ # Flush remaining bytes, if any
+ if buffer:
+ yield from _encode_chunk(buffer)
+
+ # Emit a final empty chunk to close the stream
+ yield from _encode_chunk(b'')
+
+ def _send_chunked(self, content: str) -> bool:
+ for chunk in self._chunk_content(content, max_chunk_size=20):
+ try:
+ self.wfile.write(chunk)
+ except (BrokenPipeError, ConnectionResetError) as e:
+ self.log_message(str(e))
+ return False
+ return True
+
def do_HEAD(self):
if self.path in {'/valid', '/ignored'}:
self.send_response(200, "OK")
@@ -274,17 +319,24 @@ class AnchorsIgnoreForUrlHandler(BaseHTTPRequestHandler):
self.end_headers()
def do_GET(self):
- self.do_HEAD()
if self.path == '/valid':
- self.wfile.write(b"<h1 id='valid-anchor'>valid anchor</h1>\n")
+ self.send_response(200, 'OK')
+ content = "<h1 id='valid-anchor'>valid anchor</h1>\n"
elif self.path == '/ignored':
- self.wfile.write(b"no anchor but page exists\n")
+ self.send_response(200, 'OK')
+ content = 'no anchor but page exists\n'
+ else:
+ self.send_response(404, 'Not Found')
+ content = 'not found\n'
+ self.send_header('Transfer-Encoding', 'chunked')
+ self.end_headers()
+ self._send_chunked(content)
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-anchors-ignore-for-url', freshenv=True)
-def test_anchors_ignored_for_url(app):
+def test_anchors_ignored_for_url(app: Sphinx) -> None:
with serve_application(app, AnchorsIgnoreForUrlHandler) as address:
- app.config.linkcheck_anchors_ignore_for_url = [ # type: ignore[attr-defined]
+ app.config.linkcheck_anchors_ignore_for_url = [
f'http://{address}/ignored', # existing page
f'http://{address}/invalid', # unknown page
]
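The reworked AnchorsIgnoreForUrlHandler in the hunks above serves its GET responses with Transfer-Encoding: chunked, so _chunk_content frames each chunk as a hexadecimal length, CRLF, the payload, CRLF, and finally a zero-length chunk that terminates the stream. A tiny standalone illustration of that wire format (payloads chosen for illustration):

    def encode_chunk(chunk: bytes) -> bytes:
        # hex size, CRLF, payload, CRLF -- HTTP/1.1 chunk framing
        return f'{len(chunk):X}'.encode('ascii') + b'\r\n' + chunk + b'\r\n'

    body = encode_chunk(b'hello ') + encode_chunk(b'world') + encode_chunk(b'')
    assert body == b'6\r\nhello \r\n5\r\nworld\r\n0\r\n\r\n'
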
@@ -295,7 +347,7 @@ def test_anchors_ignored_for_url(app):
attrs = ('filename', 'lineno', 'status', 'code', 'uri', 'info')
data = [json.loads(x) for x in content.splitlines()]
- assert len(data) == 7
+ assert len(data) == 8
assert all(all(attr in row for attr in attrs) for row in data)
# rows may be unsorted due to network latency or
@@ -304,6 +356,7 @@ def test_anchors_ignored_for_url(app):
assert rows[f'http://{address}/valid']['status'] == 'working'
assert rows[f'http://{address}/valid#valid-anchor']['status'] == 'working'
+ assert rows[f'http://{address}/valid#py:module::urllib.parse']['status'] == 'broken'
assert rows[f'http://{address}/valid#invalid-anchor'] == {
'status': 'broken',
'info': "Anchor 'invalid-anchor' not found",
@@ -323,7 +376,7 @@ def test_anchors_ignored_for_url(app):
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-anchor', freshenv=True)
-def test_raises_for_invalid_status(app):
+def test_raises_for_invalid_status(app: Sphinx) -> None:
class InternalServerErrorHandler(BaseHTTPRequestHandler):
protocol_version = "HTTP/1.1"
@@ -340,6 +393,50 @@ def test_raises_for_invalid_status(app):
)
+@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-anchor', freshenv=True)
+def test_incomplete_html_anchor(app):
+ class IncompleteHTMLDocumentHandler(BaseHTTPRequestHandler):
+ protocol_version = 'HTTP/1.1'
+
+ def do_GET(self):
+ content = b'this is <div id="anchor">not</div> a valid HTML document'
+ self.send_response(200, 'OK')
+ self.send_header('Content-Length', str(len(content)))
+ self.end_headers()
+ self.wfile.write(content)
+
+ with serve_application(app, IncompleteHTMLDocumentHandler):
+ app.build()
+
+ content = (app.outdir / 'output.json').read_text(encoding='utf8')
+ assert len(content.splitlines()) == 1
+
+ row = json.loads(content)
+ assert row['status'] == 'working'
+
+
+@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-anchor', freshenv=True)
+def test_decoding_error_anchor_ignored(app):
+ class NonASCIIHandler(BaseHTTPRequestHandler):
+ protocol_version = 'HTTP/1.1'
+
+ def do_GET(self):
+ content = b'\x80\x00\x80\x00' # non-ASCII byte-string
+ self.send_response(200, 'OK')
+ self.send_header('Content-Length', str(len(content)))
+ self.end_headers()
+ self.wfile.write(content)
+
+ with serve_application(app, NonASCIIHandler):
+ app.build()
+
+ content = (app.outdir / 'output.json').read_text(encoding='utf8')
+ assert len(content.splitlines()) == 1
+
+ row = json.loads(content)
+ assert row['status'] == 'ignored'
+
+
def custom_handler(valid_credentials=(), success_criteria=lambda _: True):
"""
Returns an HTTP request handler that authenticates the client and then determines
@@ -352,25 +449,27 @@ def custom_handler(valid_credentials=(), success_criteria=lambda _: True):
expected_token = b64encode(":".join(valid_credentials).encode()).decode("utf-8")
del valid_credentials
+ def authenticated(
+ method: Callable[[CustomHandler], None]
+ ) -> Callable[[CustomHandler], None]:
+ def method_if_authenticated(self):
+ if expected_token is None:
+ return method(self)
+ elif not self.headers["Authorization"]:
+ self.send_response(401, "Unauthorized")
+ self.end_headers()
+ elif self.headers["Authorization"] == f"Basic {expected_token}":
+ return method(self)
+ else:
+ self.send_response(403, "Forbidden")
+ self.send_header("Content-Length", "0")
+ self.end_headers()
+
+ return method_if_authenticated
+
class CustomHandler(BaseHTTPRequestHandler):
protocol_version = "HTTP/1.1"
- def authenticated(method):
- def method_if_authenticated(self):
- if expected_token is None:
- return method(self)
- elif not self.headers["Authorization"]:
- self.send_response(401, "Unauthorized")
- self.end_headers()
- elif self.headers["Authorization"] == f"Basic {expected_token}":
- return method(self)
- else:
- self.send_response(403, "Forbidden")
- self.send_header("Content-Length", "0")
- self.end_headers()
-
- return method_if_authenticated
-
@authenticated
def do_HEAD(self):
self.do_GET()
@@ -389,9 +488,9 @@ def custom_handler(valid_credentials=(), success_criteria=lambda _: True):
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
-def test_auth_header_uses_first_match(app):
+def test_auth_header_uses_first_match(app: Sphinx) -> None:
with serve_application(app, custom_handler(valid_credentials=("user1", "password"))) as address:
- app.config.linkcheck_auth = [ # type: ignore[attr-defined]
+ app.config.linkcheck_auth = [
(r'^$', ('no', 'match')),
(fr'^http://{re.escape(address)}/$', ('user1', 'password')),
(r'.*local.*', ('user2', 'hunter2')),
@@ -408,7 +507,7 @@ def test_auth_header_uses_first_match(app):
@pytest.mark.sphinx(
'linkcheck', testroot='linkcheck-localserver', freshenv=True,
confoverrides={'linkcheck_allow_unauthorized': False})
-def test_unauthorized_broken(app):
+def test_unauthorized_broken(app: Sphinx) -> None:
with serve_application(app, custom_handler(valid_credentials=("user1", "password"))):
app.build()
@@ -422,7 +521,7 @@ def test_unauthorized_broken(app):
@pytest.mark.sphinx(
'linkcheck', testroot='linkcheck-localserver', freshenv=True,
confoverrides={'linkcheck_auth': [(r'^$', ('user1', 'password'))]})
-def test_auth_header_no_match(app):
+def test_auth_header_no_match(app: Sphinx) -> None:
with (
serve_application(app, custom_handler(valid_credentials=("user1", "password"))),
pytest.warns(RemovedInSphinx80Warning, match='linkcheck builder encountered an HTTP 401'),
@@ -438,14 +537,14 @@ def test_auth_header_no_match(app):
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
-def test_linkcheck_request_headers(app):
+def test_linkcheck_request_headers(app: Sphinx) -> None:
def check_headers(self):
if "X-Secret" in self.headers:
return False
return self.headers["Accept"] == "text/html"
with serve_application(app, custom_handler(success_criteria=check_headers)) as address:
- app.config.linkcheck_request_headers = { # type: ignore[attr-defined]
+ app.config.linkcheck_request_headers = {
f"http://{address}/": {"Accept": "text/html"},
"*": {"X-Secret": "open sesami"},
}
@@ -458,14 +557,14 @@ def test_linkcheck_request_headers(app):
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
-def test_linkcheck_request_headers_no_slash(app):
+def test_linkcheck_request_headers_no_slash(app: Sphinx) -> None:
def check_headers(self):
if "X-Secret" in self.headers:
return False
return self.headers["Accept"] == "application/json"
with serve_application(app, custom_handler(success_criteria=check_headers)) as address:
- app.config.linkcheck_request_headers = { # type: ignore[attr-defined]
+ app.config.linkcheck_request_headers = {
f"http://{address}": {"Accept": "application/json"},
"*": {"X-Secret": "open sesami"},
}
@@ -483,7 +582,7 @@ def test_linkcheck_request_headers_no_slash(app):
"http://do.not.match.org": {"Accept": "application/json"},
"*": {"X-Secret": "open sesami"},
}})
-def test_linkcheck_request_headers_default(app):
+def test_linkcheck_request_headers_default(app: Sphinx) -> None:
def check_headers(self):
if self.headers["X-Secret"] != "open sesami":
return False
@@ -566,9 +665,9 @@ def test_follows_redirects_on_GET(app, capsys, warning):
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-warn-redirects')
-def test_linkcheck_allowed_redirects(app, warning):
+def test_linkcheck_allowed_redirects(app: Sphinx, warning: StringIO) -> None:
with serve_application(app, make_redirect_handler(support_head=False)) as address:
- app.config.linkcheck_allowed_redirects = {f'http://{address}/.*1': '.*'} # type: ignore[attr-defined]
+ app.config.linkcheck_allowed_redirects = {f'http://{address}/.*1': '.*'}
compile_linkcheck_allowed_redirects(app, app.config)
app.build()
@@ -626,7 +725,7 @@ def test_invalid_ssl(get_request, app):
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True)
-def test_connect_to_selfsigned_fails(app):
+def test_connect_to_selfsigned_fails(app: Sphinx) -> None:
with serve_application(app, OKHandler, tls_enabled=True) as address:
app.build()
@@ -639,9 +738,9 @@ def test_connect_to_selfsigned_fails(app):
assert "[SSL: CERTIFICATE_VERIFY_FAILED]" in content["info"]
-@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True)
-def test_connect_to_selfsigned_with_tls_verify_false(app):
- app.config.tls_verify = False
+@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True,
+ confoverrides={'tls_verify': False})
+def test_connect_to_selfsigned_with_tls_verify_false(app: Sphinx) -> None:
with serve_application(app, OKHandler, tls_enabled=True) as address:
app.build()
@@ -657,9 +756,9 @@ def test_connect_to_selfsigned_with_tls_verify_false(app):
}
-@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True)
-def test_connect_to_selfsigned_with_tls_cacerts(app):
- app.config.tls_cacerts = CERT_FILE
+@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True,
+ confoverrides={'tls_cacerts': CERT_FILE})
+def test_connect_to_selfsigned_with_tls_cacerts(app: Sphinx) -> None:
with serve_application(app, OKHandler, tls_enabled=True) as address:
app.build()
@@ -693,9 +792,9 @@ def test_connect_to_selfsigned_with_requests_env_var(monkeypatch, app):
}
-@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True)
-def test_connect_to_selfsigned_nonexistent_cert_file(app):
- app.config.tls_cacerts = "does/not/exist"
+@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True,
+ confoverrides={'tls_cacerts': "does/not/exist"})
+def test_connect_to_selfsigned_nonexistent_cert_file(app: Sphinx) -> None:
with serve_application(app, OKHandler, tls_enabled=True) as address:
app.build()
@@ -863,7 +962,7 @@ def test_too_many_requests_retry_after_without_header(app, capsys):
'linkcheck_timeout': 0.01,
}
)
-def test_requests_timeout(app):
+def test_requests_timeout(app: Sphinx) -> None:
class DelayedResponseHandler(BaseHTTPRequestHandler):
protocol_version = "HTTP/1.1"
@@ -882,9 +981,9 @@ def test_requests_timeout(app):
assert content["status"] == "timeout"
-@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
-def test_too_many_requests_user_timeout(app):
- app.config.linkcheck_rate_limit_timeout = 0.0
+@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True,
+ confoverrides={'linkcheck_rate_limit_timeout': 0.0})
+def test_too_many_requests_user_timeout(app: Sphinx) -> None:
with serve_application(app, make_retry_after_handler([(429, None)])) as address:
app.build()
content = (app.outdir / 'output.json').read_text(encoding='utf8')
@@ -903,21 +1002,21 @@ class FakeResponse:
url = "http://localhost/"
-def test_limit_rate_default_sleep(app):
+def test_limit_rate_default_sleep(app: Sphinx) -> None:
worker = HyperlinkAvailabilityCheckWorker(app.config, Queue(), Queue(), {})
with mock.patch('time.time', return_value=0.0):
next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After"))
assert next_check == 60.0
-def test_limit_rate_user_max_delay(app):
- app.config.linkcheck_rate_limit_timeout = 0.0
+@pytest.mark.sphinx(confoverrides={'linkcheck_rate_limit_timeout': 0.0})
+def test_limit_rate_user_max_delay(app: Sphinx) -> None:
worker = HyperlinkAvailabilityCheckWorker(app.config, Queue(), Queue(), {})
next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After"))
assert next_check is None
-def test_limit_rate_doubles_previous_wait_time(app):
+def test_limit_rate_doubles_previous_wait_time(app: Sphinx) -> None:
rate_limits = {"localhost": RateLimit(60.0, 0.0)}
worker = HyperlinkAvailabilityCheckWorker(app.config, Queue(), Queue(), rate_limits)
with mock.patch('time.time', return_value=0.0):
@@ -925,21 +1024,23 @@ def test_limit_rate_doubles_previous_wait_time(app):
assert next_check == 120.0
-def test_limit_rate_clips_wait_time_to_max_time(app):
- app.config.linkcheck_rate_limit_timeout = 90.0
+@pytest.mark.sphinx(confoverrides={'linkcheck_rate_limit_timeout': 90})
+def test_limit_rate_clips_wait_time_to_max_time(app: Sphinx, warning: StringIO) -> None:
rate_limits = {"localhost": RateLimit(60.0, 0.0)}
worker = HyperlinkAvailabilityCheckWorker(app.config, Queue(), Queue(), rate_limits)
with mock.patch('time.time', return_value=0.0):
next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After"))
assert next_check == 90.0
+ assert warning.getvalue() == ''
-def test_limit_rate_bails_out_after_waiting_max_time(app):
- app.config.linkcheck_rate_limit_timeout = 90.0
+@pytest.mark.sphinx(confoverrides={'linkcheck_rate_limit_timeout': 90.0})
+def test_limit_rate_bails_out_after_waiting_max_time(app: Sphinx, warning: StringIO) -> None:
rate_limits = {"localhost": RateLimit(90.0, 0.0)}
worker = HyperlinkAvailabilityCheckWorker(app.config, Queue(), Queue(), rate_limits)
next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After"))
assert next_check is None
+ assert warning.getvalue() == ''
@mock.patch('sphinx.util.requests.requests.Session.get_adapter')
@@ -957,11 +1058,13 @@ def test_connection_contention(get_adapter, app, capsys):
# Place a workload into the linkcheck queue
link_count = 10
- rqueue, wqueue = Queue(), Queue()
+ wqueue: Queue[CheckRequest] = Queue()
+ rqueue: Queue[CheckResult] = Queue()
for _ in range(link_count):
wqueue.put(CheckRequest(0, Hyperlink(f"http://{address}", "test", "test.rst", 1)))
- begin, checked = time.time(), []
+ begin = time.time()
+ checked: list[CheckResult] = []
threads = [
HyperlinkAvailabilityCheckWorker(
config=app.config,
@@ -997,7 +1100,7 @@ class ConnectionResetHandler(BaseHTTPRequestHandler):
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
-def test_get_after_head_raises_connection_error(app):
+def test_get_after_head_raises_connection_error(app: Sphinx) -> None:
with serve_application(app, ConnectionResetHandler) as address:
app.build()
content = (app.outdir / 'output.txt').read_text(encoding='utf8')
@@ -1014,7 +1117,7 @@ def test_get_after_head_raises_connection_error(app):
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-documents_exclude', freshenv=True)
-def test_linkcheck_exclude_documents(app):
+def test_linkcheck_exclude_documents(app: Sphinx) -> None:
with serve_application(app, DefaultsHandler):
app.build()
diff --git a/tests/test_builders/test_build_manpage.py b/tests/test_builders/test_build_manpage.py
index 7172281..31d75d6 100644
--- a/tests/test_builders/test_build_manpage.py
+++ b/tests/test_builders/test_build_manpage.py
@@ -7,6 +7,8 @@ from sphinx.builders.manpage import default_man_pages
from sphinx.config import Config
+@pytest.mark.xfail(docutils.__version_info__[:2] > (0, 21),
+ reason='Docutils has removed the reference key in master')
@pytest.mark.sphinx('man')
def test_all(app, status, warning):
app.build(force_all=True)
@@ -44,13 +46,15 @@ def test_man_pages_empty_description(app, status, warning):
confoverrides={'man_make_section_directory': True})
def test_man_make_section_directory(app, status, warning):
app.build()
- assert (app.outdir / 'man1' / 'python.1').exists()
+ assert (app.outdir / 'man1' / 'projectnamenotset.1').exists()
+@pytest.mark.xfail(docutils.__version_info__[:2] > (0, 21),
+ reason='Docutils has removed the reference key in master')
@pytest.mark.sphinx('man', testroot='directive-code')
def test_captioned_code_block(app, status, warning):
app.build(force_all=True)
- content = (app.outdir / 'python.1').read_text(encoding='utf8')
+ content = (app.outdir / 'projectnamenotset.1').read_text(encoding='utf8')
if docutils.__version_info__[:2] < (0, 21):
expected = """\
@@ -100,5 +104,5 @@ def test_default_man_pages():
@pytest.mark.sphinx('man', testroot='markup-rubric')
def test_rubric(app, status, warning):
app.build()
- content = (app.outdir / 'python.1').read_text(encoding='utf8')
+ content = (app.outdir / 'projectnamenotset.1').read_text(encoding='utf8')
assert 'This is a rubric\n' in content
diff --git a/tests/test_builders/test_build_texinfo.py b/tests/test_builders/test_build_texinfo.py
index f9effb2..6abbc96 100644
--- a/tests/test_builders/test_build_texinfo.py
+++ b/tests/test_builders/test_build_texinfo.py
@@ -40,16 +40,17 @@ def test_texinfo(app, status, warning):
def test_texinfo_rubric(app, status, warning):
app.build()
- output = (app.outdir / 'python.texi').read_text(encoding='utf8')
+ output = (app.outdir / 'projectnamenotset.texi').read_text(encoding='utf8')
assert '@heading This is a rubric' in output
assert '@heading This is a multiline rubric' in output
+ assert '@heading A rubric with a heading level' in output
@pytest.mark.sphinx('texinfo', testroot='markup-citation')
def test_texinfo_citation(app, status, warning):
app.build(force_all=True)
- output = (app.outdir / 'python.texi').read_text(encoding='utf8')
+ output = (app.outdir / 'projectnamenotset.texi').read_text(encoding='utf8')
assert 'This is a citation ref; @ref{1,,[CITE1]} and @ref{2,,[CITE2]}.' in output
assert ('@anchor{index cite1}@anchor{1}@w{(CITE1)} \n'
'This is a citation\n') in output
@@ -87,7 +88,7 @@ def test_texinfo_escape_id(app, status, warning):
def test_texinfo_footnote(app, status, warning):
app.build(force_all=True)
- output = (app.outdir / 'python.texi').read_text(encoding='utf8')
+ output = (app.outdir / 'projectnamenotset.texi').read_text(encoding='utf8')
assert 'First footnote: @footnote{\nFirst\n}' in output
@@ -120,10 +121,11 @@ def test_texinfo_samp_with_variable(app, status, warning):
def test_copy_images(app, status, warning):
app.build()
- images_dir = Path(app.outdir) / 'python-figures'
+ images_dir = Path(app.outdir) / 'projectnamenotset-figures'
images = {image.name for image in images_dir.rglob('*')}
images.discard('python-logo.png')
assert images == {
+ 'ba30773957c3fe046897111afd65a80b81cad089.png', # texinfo: image from data:image/png URI in source
'img.png',
'rimg.png',
'testimäge.png',
diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py
index e1cb1b0..e58044e 100644
--- a/tests/test_config/test_config.py
+++ b/tests/test_config/test_config.py
@@ -403,7 +403,7 @@ def test_errors_if_setup_is_not_callable(tmp_path, make_app):
assert 'callable' in str(excinfo.value)
-@pytest.fixture()
+@pytest.fixture
def make_app_with_empty_project(make_app, tmp_path):
(tmp_path / 'conf.py').write_text('', encoding='utf8')
@@ -803,3 +803,19 @@ def test_gettext_compact_command_line_str():
# regression test for #8549 (-D gettext_compact=spam)
assert config.gettext_compact == 'spam'
+
+
+def test_root_doc_and_master_doc_are_synchronized():
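+    # 'root_doc' and 'master_doc' are aliases: assigning either one is reflected in both.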
+ c = Config()
+ assert c.master_doc == 'index'
+ assert c.root_doc == c.master_doc
+
+ c = Config()
+ c.master_doc = '1234'
+ assert c.master_doc == '1234'
+ assert c.root_doc == c.master_doc
+
+ c = Config()
+ c.root_doc = '1234'
+ assert c.master_doc == '1234'
+ assert c.root_doc == c.master_doc
diff --git a/tests/test_directives/test_directive_code.py b/tests/test_directives/test_directive_code.py
index 2783d8f..6180195 100644
--- a/tests/test_directives/test_directive_code.py
+++ b/tests/test_directives/test_directive_code.py
@@ -104,7 +104,7 @@ def test_LiteralIncludeReader_lines_and_lineno_match1(literal_inc_path):
assert reader.lineno_start == 3
-@pytest.mark.sphinx() # init locale for errors
+@pytest.mark.sphinx # init locale for errors
def test_LiteralIncludeReader_lines_and_lineno_match2(literal_inc_path, app, status, warning):
options = {'lines': '0,3,5', 'lineno-match': True}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
@@ -112,7 +112,7 @@ def test_LiteralIncludeReader_lines_and_lineno_match2(literal_inc_path, app, sta
reader.read()
-@pytest.mark.sphinx() # init locale for errors
+@pytest.mark.sphinx # init locale for errors
def test_LiteralIncludeReader_lines_and_lineno_match3(literal_inc_path, app, status, warning):
options = {'lines': '100-', 'lineno-match': True}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
@@ -330,7 +330,7 @@ def test_code_block_caption_html(app, status, warning):
@pytest.mark.sphinx('latex', testroot='directive-code')
def test_code_block_caption_latex(app, status, warning):
app.build(force_all=True)
- latex = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ latex = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
caption = '\\sphinxSetupCaptionForVerbatim{caption \\sphinxstyleemphasis{test} rb}'
label = '\\def\\sphinxLiteralBlockLabel{\\label{\\detokenize{caption:id1}}}'
link = '\\hyperref[\\detokenize{caption:name-test-rb}]' \
@@ -343,7 +343,7 @@ def test_code_block_caption_latex(app, status, warning):
@pytest.mark.sphinx('latex', testroot='directive-code')
def test_code_block_namedlink_latex(app, status, warning):
app.build(force_all=True)
- latex = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ latex = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
label1 = '\\def\\sphinxLiteralBlockLabel{\\label{\\detokenize{caption:name-test-rb}}}'
link1 = '\\hyperref[\\detokenize{caption:name-test-rb}]'\
'{\\sphinxcrossref{\\DUrole{std,std-ref}{Ruby}}'
@@ -360,7 +360,7 @@ def test_code_block_namedlink_latex(app, status, warning):
@pytest.mark.sphinx('latex', testroot='directive-code')
def test_code_block_emphasize_latex(app, status, warning):
app.build(filenames=[app.srcdir / 'emphasize.rst'])
- latex = (app.outdir / 'python.tex').read_text(encoding='utf8').replace('\r\n', '\n')
+ latex = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8').replace('\r\n', '\n')
includes = '\\fvset{hllines={, 5, 6, 13, 14, 15, 24, 25, 26,}}%\n'
assert includes in latex
includes = '\\end{sphinxVerbatim}\n\\sphinxresetverbatimhllines\n'
@@ -424,7 +424,7 @@ def test_literal_include_linenos(app, status, warning):
@pytest.mark.sphinx('latex', testroot='directive-code')
def test_literalinclude_file_whole_of_emptyline(app, status, warning):
app.build(force_all=True)
- latex = (app.outdir / 'python.tex').read_text(encoding='utf8').replace('\r\n', '\n')
+ latex = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8').replace('\r\n', '\n')
includes = (
'\\begin{sphinxVerbatim}'
'[commandchars=\\\\\\{\\},numbers=left,firstnumber=1,stepnumber=1]\n'
@@ -450,7 +450,7 @@ def test_literalinclude_caption_html(app, status, warning):
@pytest.mark.sphinx('latex', testroot='directive-code')
def test_literalinclude_caption_latex(app, status, warning):
app.build(filenames='index')
- latex = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ latex = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
caption = '\\sphinxSetupCaptionForVerbatim{caption \\sphinxstylestrong{test} py}'
label = '\\def\\sphinxLiteralBlockLabel{\\label{\\detokenize{caption:id2}}}'
link = '\\hyperref[\\detokenize{caption:name-test-py}]' \
@@ -463,7 +463,7 @@ def test_literalinclude_caption_latex(app, status, warning):
@pytest.mark.sphinx('latex', testroot='directive-code')
def test_literalinclude_namedlink_latex(app, status, warning):
app.build(filenames='index')
- latex = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ latex = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
label1 = '\\def\\sphinxLiteralBlockLabel{\\label{\\detokenize{caption:name-test-py}}}'
link1 = '\\hyperref[\\detokenize{caption:name-test-py}]'\
'{\\sphinxcrossref{\\DUrole{std,std-ref}{Python}}'
diff --git a/tests/test_directives/test_directive_other.py b/tests/test_directives/test_directive_other.py
index 1feb251..e00e291 100644
--- a/tests/test_directives/test_directive_other.py
+++ b/tests/test_directives/test_directive_other.py
@@ -137,6 +137,18 @@ def test_reversed_toctree(app):
@pytest.mark.sphinx(testroot='toctree-glob')
+def test_toctree_class(app):
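+    # The :class: option is propagated to the toctree wrapper node alongside 'toctree-wrapper'.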
+ text = ('.. toctree::\n'
+ ' :class: custom-toc\n'
+ '\n'
+ ' foo\n')
+ app.env.find_files(app.config, app.builder)
+ doctree = restructuredtext.parse(app, text, 'index')
+ assert_node(doctree, [nodes.document, nodes.compound, addnodes.toctree])
+ assert doctree[0].attributes['classes'] == ['toctree-wrapper', 'custom-toc']
+
+
+@pytest.mark.sphinx(testroot='toctree-glob')
def test_toctree_twice(app):
text = (".. toctree::\n"
"\n"
diff --git a/tests/test_domains/test_domain_cpp.py b/tests/test_domains/test_domain_cpp.py
index abd0f82..d8e612e 100644
--- a/tests/test_domains/test_domain_cpp.py
+++ b/tests/test_domains/test_domain_cpp.py
@@ -1046,19 +1046,21 @@ def test_domain_cpp_ast_attributes():
check('enumerator', '{key}Foo [[attr1]] [[attr2]] = 42', {2: '3Foo'})
+def check_ast_xref_parsing(target):
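+    # Parse the target as a cross-reference and require the parser to consume the whole input.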
+ class Config:
+ cpp_id_attributes = ["id_attr"]
+ cpp_paren_attributes = ["paren_attr"]
+
+ parser = DefinitionParser(target, location='', config=Config())
+ parser.parse_xref_object()
+ parser.assert_end()
+
+
def test_domain_cpp_ast_xref_parsing():
- def check(target):
- class Config:
- cpp_id_attributes = ["id_attr"]
- cpp_paren_attributes = ["paren_attr"]
- parser = DefinitionParser(target, location=None,
- config=Config())
- ast, isShorthand = parser.parse_xref_object()
- parser.assert_end()
- check('f')
- check('f()')
- check('void f()')
- check('T f()')
+ check_ast_xref_parsing('f')
+ check_ast_xref_parsing('f()')
+ check_ast_xref_parsing('void f()')
+ check_ast_xref_parsing('T f()')
@pytest.mark.parametrize(
@@ -1213,18 +1215,12 @@ def test_domain_cpp_build_misuse_of_roles(app, status, warning):
def test_domain_cpp_build_with_add_function_parentheses_is_True(app, status, warning):
app.build(force_all=True)
- def check(spec, text, file):
- pattern = '<li><p>%s<a .*?><code .*?><span .*?>%s</span></code></a></p></li>' % spec
- res = re.search(pattern, text)
- if not res:
- print(f"Pattern\n\t{pattern}\nnot found in {file}")
- raise AssertionError
rolePatterns = [
- ('', 'Sphinx'),
- ('', 'Sphinx::version'),
- ('', 'version'),
- ('', 'List'),
- ('', 'MyEnum'),
+ 'Sphinx',
+ 'Sphinx::version',
+ 'version',
+ 'List',
+ 'MyEnum',
]
parenPatterns = [
('ref function without parens ', r'paren_1\(\)'),
@@ -1237,35 +1233,33 @@ def test_domain_cpp_build_with_add_function_parentheses_is_True(app, status, war
('ref op call with parens, explicit title ', 'paren_8_title'),
]
- f = 'roles.html'
- t = (app.outdir / f).read_text(encoding='utf8')
- for s in rolePatterns:
- check(s, t, f)
- for s in parenPatterns:
- check(s, t, f)
+ text = (app.outdir / 'roles.html').read_text(encoding='utf8')
+ for ref_text in rolePatterns:
+ pattern = f'<li><p><a .*?><code .*?><span .*?>{ref_text}</span></code></a></p></li>'
+ match = re.search(pattern, text)
+ assert match is not None, f"Pattern not found in roles.html:\n\t{pattern}"
+ for (desc_text, ref_text) in parenPatterns:
+ pattern = f'<li><p>{desc_text}<a .*?><code .*?><span .*?>{ref_text}</span></code></a></p></li>'
+ match = re.search(pattern, text)
+ assert match is not None, f"Pattern not found in roles.html:\n\t{pattern}"
- f = 'any-role.html'
- t = (app.outdir / f).read_text(encoding='utf8')
- for s in parenPatterns:
- check(s, t, f)
+ text = (app.outdir / 'any-role.html').read_text(encoding='utf8')
+ for (desc_text, ref_text) in parenPatterns:
+ pattern = f'<li><p>{desc_text}<a .*?><code .*?><span .*?>{ref_text}</span></code></a></p></li>'
+ match = re.search(pattern, text)
+ assert match is not None, f"Pattern not found in any-role.html:\n\t{pattern}"
@pytest.mark.sphinx(testroot='domain-cpp', confoverrides={'add_function_parentheses': False})
def test_domain_cpp_build_with_add_function_parentheses_is_False(app, status, warning):
app.build(force_all=True)
- def check(spec, text, file):
- pattern = '<li><p>%s<a .*?><code .*?><span .*?>%s</span></code></a></p></li>' % spec
- res = re.search(pattern, text)
- if not res:
- print(f"Pattern\n\t{pattern}\nnot found in {file}")
- raise AssertionError
rolePatterns = [
- ('', 'Sphinx'),
- ('', 'Sphinx::version'),
- ('', 'version'),
- ('', 'List'),
- ('', 'MyEnum'),
+ 'Sphinx',
+ 'Sphinx::version',
+ 'version',
+ 'List',
+ 'MyEnum',
]
parenPatterns = [
('ref function without parens ', 'paren_1'),
@@ -1278,17 +1272,21 @@ def test_domain_cpp_build_with_add_function_parentheses_is_False(app, status, wa
('ref op call with parens, explicit title ', 'paren_8_title'),
]
- f = 'roles.html'
- t = (app.outdir / f).read_text(encoding='utf8')
- for s in rolePatterns:
- check(s, t, f)
- for s in parenPatterns:
- check(s, t, f)
-
- f = 'any-role.html'
- t = (app.outdir / f).read_text(encoding='utf8')
- for s in parenPatterns:
- check(s, t, f)
+ text = (app.outdir / 'roles.html').read_text(encoding='utf8')
+ for ref_text in rolePatterns:
+ pattern = f'<li><p><a .*?><code .*?><span .*?>{ref_text}</span></code></a></p></li>'
+ match = re.search(pattern, text)
+ assert match is not None, f"Pattern not found in roles.html:\n\t{pattern}"
+ for (desc_text, ref_text) in parenPatterns:
+ pattern = f'<li><p>{desc_text}<a .*?><code .*?><span .*?>{ref_text}</span></code></a></p></li>'
+ match = re.search(pattern, text)
+ assert match is not None, f"Pattern not found in roles.html:\n\t{pattern}"
+
+ text = (app.outdir / 'any-role.html').read_text(encoding='utf8')
+ for (desc_text, ref_text) in parenPatterns:
+ pattern = f'<li><p>{desc_text}<a .*?><code .*?><span .*?>{ref_text}</span></code></a></p></li>'
+ match = re.search(pattern, text)
+ assert match is not None, f"Pattern not found in any-role.html:\n\t{pattern}"
@pytest.mark.sphinx(testroot='domain-cpp')
diff --git a/tests/test_domains/test_domain_py.py b/tests/test_domains/test_domain_py.py
index e653c80..ce3d444 100644
--- a/tests/test_domains/test_domain_py.py
+++ b/tests/test_domains/test_domain_py.py
@@ -92,19 +92,21 @@ def test_domain_py_xrefs(app, status, warning):
refnodes = list(doctree.findall(pending_xref))
assert_refnode(refnodes[0], None, None, 'TopLevel', 'class')
assert_refnode(refnodes[1], None, None, 'top_level', 'meth')
- assert_refnode(refnodes[2], None, 'NestedParentA', 'child_1', 'meth')
- assert_refnode(refnodes[3], None, 'NestedParentA', 'NestedChildA.subchild_2', 'meth')
- assert_refnode(refnodes[4], None, 'NestedParentA', 'child_2', 'meth')
- assert_refnode(refnodes[5], False, 'NestedParentA', 'any_child', domain='')
- assert_refnode(refnodes[6], None, 'NestedParentA', 'NestedChildA', 'class')
- assert_refnode(refnodes[7], None, 'NestedParentA.NestedChildA', 'subchild_2', 'meth')
- assert_refnode(refnodes[8], None, 'NestedParentA.NestedChildA',
+ assert_refnode(refnodes[2], None, None, 'TopLevelType', 'type')
+ assert_refnode(refnodes[3], None, 'NestedParentA', 'child_1', 'meth')
+ assert_refnode(refnodes[4], None, 'NestedParentA', 'NestedChildA.subchild_2', 'meth')
+ assert_refnode(refnodes[5], None, 'NestedParentA', 'child_2', 'meth')
+ assert_refnode(refnodes[6], False, 'NestedParentA', 'any_child', domain='')
+ assert_refnode(refnodes[7], None, 'NestedParentA', 'NestedChildA', 'class')
+ assert_refnode(refnodes[8], None, 'NestedParentA.NestedChildA', 'subchild_2', 'meth')
+ assert_refnode(refnodes[9], None, 'NestedParentA.NestedChildA',
'NestedParentA.child_1', 'meth')
- assert_refnode(refnodes[9], None, 'NestedParentA', 'NestedChildA.subchild_1', 'meth')
- assert_refnode(refnodes[10], None, 'NestedParentB', 'child_1', 'meth')
- assert_refnode(refnodes[11], None, 'NestedParentB', 'NestedParentB', 'class')
- assert_refnode(refnodes[12], None, None, 'NestedParentA.NestedChildA', 'class')
- assert len(refnodes) == 13
+ assert_refnode(refnodes[10], None, 'NestedParentA', 'NestedChildA.subchild_1', 'meth')
+ assert_refnode(refnodes[11], None, 'NestedParentB', 'child_1', 'meth')
+ assert_refnode(refnodes[12], None, 'NestedParentB', 'NestedParentB', 'class')
+ assert_refnode(refnodes[13], None, None, 'NestedParentA.NestedChildA', 'class')
+ assert_refnode(refnodes[14], None, None, 'NestedParentA.NestedTypeA', 'type')
+ assert len(refnodes) == 15
doctree = app.env.get_doctree('module')
refnodes = list(doctree.findall(pending_xref))
@@ -135,7 +137,10 @@ def test_domain_py_xrefs(app, status, warning):
assert_refnode(refnodes[15], False, False, 'index', 'doc', domain='std')
assert_refnode(refnodes[16], False, False, 'typing.Literal', 'obj', domain='py')
assert_refnode(refnodes[17], False, False, 'typing.Literal', 'obj', domain='py')
- assert len(refnodes) == 18
+ assert_refnode(refnodes[18], False, False, 'list', 'class', domain='py')
+ assert_refnode(refnodes[19], False, False, 'int', 'class', domain='py')
+ assert_refnode(refnodes[20], False, False, 'str', 'class', domain='py')
+ assert len(refnodes) == 21
doctree = app.env.get_doctree('module_option')
refnodes = list(doctree.findall(pending_xref))
@@ -191,7 +196,9 @@ def test_domain_py_objects(app, status, warning):
assert objects['TopLevel'][2] == 'class'
assert objects['top_level'][2] == 'method'
+ assert objects['TopLevelType'][2] == 'type'
assert objects['NestedParentA'][2] == 'class'
+ assert objects['NestedParentA.NestedTypeA'][2] == 'type'
assert objects['NestedParentA.child_1'][2] == 'method'
assert objects['NestedParentA.any_child'][2] == 'method'
assert objects['NestedParentA.NestedChildA'][2] == 'class'
@@ -233,6 +240,9 @@ def test_domain_py_find_obj(app, status, warning):
assert (find_obj(None, None, 'NONEXISTANT', 'class') == [])
assert (find_obj(None, None, 'NestedParentA', 'class') ==
[('NestedParentA', ('roles', 'NestedParentA', 'class', False))])
+ assert (find_obj(None, None, 'NestedParentA.NestedTypeA', 'type') ==
+ [('NestedParentA.NestedTypeA',
+ ('roles', 'NestedParentA.NestedTypeA', 'type', False))])
assert (find_obj(None, None, 'NestedParentA.NestedChildA', 'class') ==
[('NestedParentA.NestedChildA',
('roles', 'NestedParentA.NestedChildA', 'class', False))])
@@ -360,6 +370,27 @@ def test_parse_annotation(app):
[desc_sig_punctuation, "]"]))
assert_node(doctree[0], pending_xref, refdomain="py", reftype="obj", reftarget="typing.Literal")
+ # Annotated type with callable gets parsed
+ doctree = _parse_annotation("Annotated[Optional[str], annotated_types.MaxLen(max_length=10)]", app.env)
+ assert_node(doctree, (
+ [pending_xref, 'Annotated'],
+ [desc_sig_punctuation, '['],
+ [pending_xref, 'str'],
+ [desc_sig_space, ' '],
+ [desc_sig_punctuation, '|'],
+ [desc_sig_space, ' '],
+ [pending_xref, 'None'],
+ [desc_sig_punctuation, ','],
+ [desc_sig_space, ' '],
+ [pending_xref, 'annotated_types.MaxLen'],
+ [desc_sig_punctuation, '('],
+ [desc_sig_name, 'max_length'],
+ [desc_sig_operator, '='],
+ [desc_sig_literal_number, '10'],
+ [desc_sig_punctuation, ')'],
+ [desc_sig_punctuation, ']'],
+ ))
+
def test_parse_annotation_suppress(app):
doctree = _parse_annotation("~typing.Dict[str, str]", app.env)
@@ -743,7 +774,7 @@ def test_function_pep_695(app):
S,\
T: int,\
U: (int, str),\
- R: int | int,\
+ R: int | str,\
A: int | Annotated[int, ctype("char")],\
*V,\
**P\
@@ -785,14 +816,29 @@ def test_function_pep_695(app):
desc_sig_space,
[desc_sig_punctuation, '|'],
desc_sig_space,
- [pending_xref, 'int'],
+ [pending_xref, 'str'],
)],
)],
[desc_type_parameter, (
[desc_sig_name, 'A'],
[desc_sig_punctuation, ':'],
desc_sig_space,
- [desc_sig_name, ([pending_xref, 'int | Annotated[int, ctype("char")]'])],
+ [desc_sig_name, (
+ [pending_xref, 'int'],
+ [desc_sig_space, ' '],
+ [desc_sig_punctuation, '|'],
+ [desc_sig_space, ' '],
+ [pending_xref, 'Annotated'],
+ [desc_sig_punctuation, '['],
+ [pending_xref, 'int'],
+ [desc_sig_punctuation, ','],
+ [desc_sig_space, ' '],
+ [pending_xref, 'ctype'],
+ [desc_sig_punctuation, '('],
+ [desc_sig_literal_string, "'char'"],
+ [desc_sig_punctuation, ')'],
+ [desc_sig_punctuation, ']'],
+ )],
)],
[desc_type_parameter, (
[desc_sig_operator, '*'],
@@ -977,7 +1023,7 @@ def test_class_def_pep_696(app):
('[T:(*Ts)|int]', '[T: (*Ts) | int]'),
('[T:(int|(*Ts))]', '[T: (int | (*Ts))]'),
('[T:((*Ts)|int)]', '[T: ((*Ts) | int)]'),
- ('[T:Annotated[int,ctype("char")]]', '[T: Annotated[int, ctype("char")]]'),
+ ("[T:Annotated[int,ctype('char')]]", "[T: Annotated[int, ctype('char')]]"),
])
def test_pep_695_and_pep_696_whitespaces_in_bound(app, tp_list, tptext):
text = f'.. py:function:: f{tp_list}()'
diff --git a/tests/test_domains/test_domain_py_pyobject.py b/tests/test_domains/test_domain_py_pyobject.py
index 04f9341..adc0453 100644
--- a/tests/test_domains/test_domain_py_pyobject.py
+++ b/tests/test_domains/test_domain_py_pyobject.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+import pytest
from docutils import nodes
from sphinx import addnodes
@@ -362,6 +363,76 @@ def test_pyproperty(app):
assert domain.objects['Class.prop2'] == ('index', 'Class.prop2', 'property', False)
+def test_py_type_alias(app):
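+    # py:type aliases should be registered both at module level and nested inside a class.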
+ text = (".. py:module:: example\n"
+ ".. py:type:: Alias1\n"
+ " :canonical: list[str | int]\n"
+ "\n"
+ ".. py:class:: Class\n"
+ "\n"
+ " .. py:type:: Alias2\n"
+ " :canonical: int\n")
+ domain = app.env.get_domain('py')
+ doctree = restructuredtext.parse(app, text)
+ assert_node(doctree, (addnodes.index,
+ addnodes.index,
+ nodes.target,
+ [desc, ([desc_signature, ([desc_annotation, ('type', desc_sig_space)],
+ [desc_addname, 'example.'],
+ [desc_name, 'Alias1'],
+ [desc_annotation, (desc_sig_space,
+ [desc_sig_punctuation, '='],
+ desc_sig_space,
+ [pending_xref, 'list'],
+ [desc_sig_punctuation, '['],
+ [pending_xref, 'str'],
+ desc_sig_space,
+ [desc_sig_punctuation, '|'],
+ desc_sig_space,
+ [pending_xref, 'int'],
+ [desc_sig_punctuation, ']'],
+ )])],
+ [desc_content, ()])],
+ addnodes.index,
+ [desc, ([desc_signature, ([desc_annotation, ('class', desc_sig_space)],
+ [desc_addname, 'example.'],
+ [desc_name, 'Class'])],
+ [desc_content, (addnodes.index,
+ desc)])]))
+ assert_node(doctree[5][1][0], addnodes.index,
+ entries=[('single', 'Alias2 (type alias in example.Class)', 'example.Class.Alias2', '', None)])
+ assert_node(doctree[5][1][1], ([desc_signature, ([desc_annotation, ('type', desc_sig_space)],
+ [desc_name, 'Alias2'],
+ [desc_annotation, (desc_sig_space,
+ [desc_sig_punctuation, '='],
+ desc_sig_space,
+ [pending_xref, 'int'])])],
+ [desc_content, ()]))
+ assert 'example.Alias1' in domain.objects
+ assert domain.objects['example.Alias1'] == ('index', 'example.Alias1', 'type', False)
+ assert 'example.Class.Alias2' in domain.objects
+ assert domain.objects['example.Class.Alias2'] == ('index', 'example.Class.Alias2', 'type', False)
+
+
+@pytest.mark.sphinx('html', testroot='domain-py', freshenv=True)
+def test_domain_py_type_alias(app, status, warning):
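+    # The rendered alias value should cross-reference module_two.SomeClass.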
+ app.build(force_all=True)
+
+ content = (app.outdir / 'type_alias.html').read_text(encoding='utf8')
+ assert ('<em class="property"><span class="pre">type</span><span class="w"> </span></em>'
+ '<span class="sig-prename descclassname"><span class="pre">module_one.</span></span>'
+ '<span class="sig-name descname"><span class="pre">MyAlias</span></span>'
+ '<em class="property"><span class="w"> </span><span class="p"><span class="pre">=</span></span>'
+ '<span class="w"> </span><span class="pre">list</span>'
+ '<span class="p"><span class="pre">[</span></span>'
+ '<span class="pre">int</span><span class="w"> </span>'
+ '<span class="p"><span class="pre">|</span></span><span class="w"> </span>'
+ '<a class="reference internal" href="#module_two.SomeClass" title="module_two.SomeClass">'
+ '<span class="pre">module_two.SomeClass</span></a>'
+ '<span class="p"><span class="pre">]</span></span></em>' in content)
+ assert warning.getvalue() == ''
+
+
def test_pydecorator_signature(app):
text = ".. py:decorator:: deco"
domain = app.env.get_domain('py')
diff --git a/tests/test_environment/test_environment.py b/tests/test_environment/test_environment.py
index 8a34457..82de597 100644
--- a/tests/test_environment/test_environment.py
+++ b/tests/test_environment/test_environment.py
@@ -34,7 +34,7 @@ def test_config_status(make_app, app_params):
assert app3.env.config_status == CONFIG_CHANGED
app3.build()
shutil.move(fname[:-4] + 'x.rst', fname)
- assert "[config changed ('root_doc')] 1 added" in app3._status.getvalue()
+ assert "[config changed ('master_doc')] 1 added" in app3._status.getvalue()
# incremental build (extension changed)
app4 = make_app(*args, confoverrides={'extensions': ['sphinx.ext.autodoc']}, **kwargs)
diff --git a/tests/test_environment/test_environment_indexentries.py b/tests/test_environment/test_environment_indexentries.py
index 4cfdc28..19cb5a7 100644
--- a/tests/test_environment/test_environment_indexentries.py
+++ b/tests/test_environment/test_environment_indexentries.py
@@ -22,22 +22,22 @@ def test_create_single_index(app):
restructuredtext.parse(app, text)
index = IndexEntries(app.env).create_index(app.builder)
assert len(index) == 6
- assert index[0] == ('Symbols', [('&-symbol', [[('', '#index-9')], [], None]),
- ('9-symbol', [[('', '#index-8')], [], None]),
- ('£100', [[('', '#index-10')], [], None])])
- assert index[1] == ('D', [('docutils', [[('', '#index-0')], [], None])])
- assert index[2] == ('P', [('pip', [[], [('install', [('', '#index-2')]),
- ('upgrade', [('', '#index-3')])], None]),
- ('Python', [[('', '#index-1')], [], None])])
- assert index[3] == ('S', [('Sphinx', [[('', '#index-4')], [], None])])
+ assert index[0] == ('Symbols', [('&-symbol', ([('', '#index-9')], [], None)),
+ ('9-symbol', ([('', '#index-8')], [], None)),
+ ('£100', ([('', '#index-10')], [], None))])
+ assert index[1] == ('D', [('docutils', ([('', '#index-0')], [], None))])
+ assert index[2] == ('P', [('pip', ([], [('install', [('', '#index-2')]),
+ ('upgrade', [('', '#index-3')])], None)),
+ ('Python', ([('', '#index-1')], [], None))])
+ assert index[3] == ('S', [('Sphinx', ([('', '#index-4')], [], None))])
assert index[4] == ('Е',
- [('ёлка', [[('', '#index-6')], [], None]),
- ('Ель', [[('', '#index-5')], [], None])])
+ [('ёлка', ([('', '#index-6')], [], None)),
+ ('Ель', ([('', '#index-5')], [], None))])
# Here the word starts with U+200F RIGHT-TO-LEFT MARK, which should be
# ignored when getting the first letter.
assert index[5] == ('\u05e2', [(
'\N{RIGHT-TO-LEFT MARK}\u05e2\u05d1\u05e8\u05d9\u05ea\N{LEFT-TO-RIGHT MARK}',
- [[('', '#index-7')], [], None],
+ ([('', '#index-7')], [], None),
)])
@@ -52,24 +52,24 @@ def test_create_pair_index(app):
restructuredtext.parse(app, text)
index = IndexEntries(app.env).create_index(app.builder)
assert len(index) == 7
- assert index[0] == ('Symbols', [(':+1:', [[], [('Sphinx', [('', '#index-3')])], None])])
+ assert index[0] == ('Symbols', [(':+1:', ([], [('Sphinx', [('', '#index-3')])], None))])
assert index[1] == ('D',
- [('documentation tool', [[], [('Sphinx', [('', '#index-2')])], None]),
- ('docutils', [[], [('reStructuredText', [('', '#index-0')])], None])])
- assert index[2] == ('I', [('interpreter', [[], [('Python', [('', '#index-1')])], None])])
- assert index[3] == ('P', [('Python', [[], [('interpreter', [('', '#index-1')])], None])])
+ [('documentation tool', ([], [('Sphinx', [('', '#index-2')])], None)),
+ ('docutils', ([], [('reStructuredText', [('', '#index-0')])], None))])
+ assert index[2] == ('I', [('interpreter', ([], [('Python', [('', '#index-1')])], None))])
+ assert index[3] == ('P', [('Python', ([], [('interpreter', [('', '#index-1')])], None))])
assert index[4] == ('R',
- [('reStructuredText', [[], [('docutils', [('', '#index-0')])], None])])
+ [('reStructuredText', ([], [('docutils', [('', '#index-0')])], None))])
assert index[5] == ('S',
- [('Sphinx', [[],
+ [('Sphinx', ([],
[(':+1:', [('', '#index-3')]),
('documentation tool', [('', '#index-2')]),
('ёлка', [('', '#index-5')]),
('Ель', [('', '#index-4')])],
- None])])
+ None))])
assert index[6] == ('Е',
- [('ёлка', [[], [('Sphinx', [('', '#index-5')])], None]),
- ('Ель', [[], [('Sphinx', [('', '#index-4')])], None])])
+ [('ёлка', ([], [('Sphinx', [('', '#index-5')])], None)),
+ ('Ель', ([], [('Sphinx', [('', '#index-4')])], None))])
@pytest.mark.sphinx('dummy', freshenv=True)
@@ -79,12 +79,12 @@ def test_create_triple_index(app):
restructuredtext.parse(app, text)
index = IndexEntries(app.env).create_index(app.builder)
assert len(index) == 5
- assert index[0] == ('B', [('bar', [[], [('baz, foo', [('', '#index-0')])], None]),
- ('baz', [[], [('foo bar', [('', '#index-0')])], None])])
- assert index[1] == ('F', [('foo', [[], [('bar baz', [('', '#index-0')])], None])])
- assert index[2] == ('P', [('Python', [[], [('Sphinx reST', [('', '#index-1')])], None])])
- assert index[3] == ('R', [('reST', [[], [('Python Sphinx', [('', '#index-1')])], None])])
- assert index[4] == ('S', [('Sphinx', [[], [('reST, Python', [('', '#index-1')])], None])])
+ assert index[0] == ('B', [('bar', ([], [('baz, foo', [('', '#index-0')])], None)),
+ ('baz', ([], [('foo bar', [('', '#index-0')])], None))])
+ assert index[1] == ('F', [('foo', ([], [('bar baz', [('', '#index-0')])], None))])
+ assert index[2] == ('P', [('Python', ([], [('Sphinx reST', [('', '#index-1')])], None))])
+ assert index[3] == ('R', [('reST', ([], [('Python Sphinx', [('', '#index-1')])], None))])
+ assert index[4] == ('S', [('Sphinx', ([], [('reST, Python', [('', '#index-1')])], None))])
@pytest.mark.sphinx('dummy', freshenv=True)
@@ -95,9 +95,9 @@ def test_create_see_index(app):
restructuredtext.parse(app, text)
index = IndexEntries(app.env).create_index(app.builder)
assert len(index) == 3
- assert index[0] == ('D', [('docutils', [[], [('see reStructuredText', [])], None])])
- assert index[1] == ('P', [('Python', [[], [('see interpreter', [])], None])])
- assert index[2] == ('S', [('Sphinx', [[], [('see documentation tool', [])], None])])
+ assert index[0] == ('D', [('docutils', ([], [('see reStructuredText', [])], None))])
+ assert index[1] == ('P', [('Python', ([], [('see interpreter', [])], None))])
+ assert index[2] == ('S', [('Sphinx', ([], [('see documentation tool', [])], None))])
@pytest.mark.sphinx('dummy', freshenv=True)
@@ -108,9 +108,9 @@ def test_create_seealso_index(app):
restructuredtext.parse(app, text)
index = IndexEntries(app.env).create_index(app.builder)
assert len(index) == 3
- assert index[0] == ('D', [('docutils', [[], [('see also reStructuredText', [])], None])])
- assert index[1] == ('P', [('Python', [[], [('see also interpreter', [])], None])])
- assert index[2] == ('S', [('Sphinx', [[], [('see also documentation tool', [])], None])])
+ assert index[0] == ('D', [('docutils', ([], [('see also reStructuredText', [])], None))])
+ assert index[1] == ('P', [('Python', ([], [('see also interpreter', [])], None))])
+ assert index[2] == ('S', [('Sphinx', ([], [('see also documentation tool', [])], None))])
@pytest.mark.sphinx('dummy', freshenv=True)
@@ -122,10 +122,10 @@ def test_create_main_index(app):
restructuredtext.parse(app, text)
index = IndexEntries(app.env).create_index(app.builder)
assert len(index) == 2
- assert index[0] == ('D', [('docutils', [[('main', '#index-0'),
- ('', '#index-1')], [], None])])
- assert index[1] == ('P', [('pip', [[], [('install', [('main', '#index-3'),
- ('', '#index-2')])], None])])
+ assert index[0] == ('D', [('docutils', ([('main', '#index-0'),
+ ('', '#index-1')], [], None))])
+ assert index[1] == ('P', [('pip', ([], [('install', [('main', '#index-3'),
+ ('', '#index-2')])], None))])
@pytest.mark.sphinx('dummy', freshenv=True)
@@ -140,9 +140,9 @@ def test_create_index_with_name(app):
# check index is created correctly
assert len(index) == 3
- assert index[0] == ('D', [('docutils', [[('', '#ref1')], [], None])])
- assert index[1] == ('P', [('Python', [[('', '#ref2')], [], None])])
- assert index[2] == ('S', [('Sphinx', [[('', '#index-0')], [], None])])
+ assert index[0] == ('D', [('docutils', ([('', '#ref1')], [], None))])
+ assert index[1] == ('P', [('Python', ([('', '#ref2')], [], None))])
+ assert index[2] == ('S', [('Sphinx', ([('', '#index-0')], [], None))])
# check the reference labels are created correctly
std = app.env.get_domain('std')
@@ -161,6 +161,6 @@ def test_create_index_by_key(app):
restructuredtext.parse(app, text)
index = IndexEntries(app.env).create_index(app.builder)
assert len(index) == 3
- assert index[0] == ('D', [('docutils', [[('main', '#term-docutils')], [], None])])
- assert index[1] == ('P', [('Python', [[('main', '#term-Python')], [], None])])
- assert index[2] == ('ス', [('スフィンクス', [[('main', '#term-0')], [], 'ス'])])
+ assert index[0] == ('D', [('docutils', ([('main', '#term-docutils')], [], None))])
+ assert index[1] == ('P', [('Python', ([('main', '#term-Python')], [], None))])
+ assert index[2] == ('ス', [('スフィンクス', ([('main', '#term-0')], [], 'ス'))])
diff --git a/tests/test_environment/test_environment_toctree.py b/tests/test_environment/test_environment_toctree.py
index 175c6ab..6979a12 100644
--- a/tests/test_environment/test_environment_toctree.py
+++ b/tests/test_environment/test_environment_toctree.py
@@ -132,7 +132,7 @@ def test_domain_objects(app):
assert app.env.toc_num_entries['index'] == 0
assert app.env.toc_num_entries['domains'] == 9
- assert app.env.toctree_includes['index'] == ['domains']
+ assert app.env.toctree_includes['index'] == ['domains', 'document_scoping']
assert 'index' in app.env.files_to_rebuild['domains']
assert app.env.glob_toctrees == set()
assert app.env.numbered_toctrees == {'index'}
@@ -161,6 +161,41 @@ def test_domain_objects(app):
[list_item, ([compact_paragraph, reference, literal, "HelloWorldPrinter.print()"])])
+@pytest.mark.sphinx('dummy', testroot='toctree-domain-objects')
+def test_domain_objects_document_scoping(app):
+ app.build()
+
+ # tocs
+ toctree = app.env.tocs['document_scoping']
+ assert_node(
+ toctree,
+ [bullet_list, list_item, (
+ compact_paragraph, # [0][0]
+ [bullet_list, ( # [0][1]
+ [list_item, compact_paragraph, reference, literal, 'ClassLevel1a'], # [0][1][0]
+ [list_item, ( # [0][1][1]
+ [compact_paragraph, reference, literal, 'ClassLevel1b'], # [0][1][1][0]
+ [bullet_list, list_item, compact_paragraph, reference, literal, 'ClassLevel1b.f()'], # [0][1][1][1][0]
+ )],
+ [list_item, compact_paragraph, reference, literal, 'ClassLevel1a.g()'], # [0][1][2]
+ [list_item, compact_paragraph, reference, literal, 'ClassLevel1b.g()'], # [0][1][3]
+ [list_item, ( # [0][1][4]
+ [compact_paragraph, reference, 'Level 2'], # [0][1][4][0]
+ [bullet_list, ( # [0][1][4][1]
+ [list_item, compact_paragraph, reference, literal, 'ClassLevel2a'], # [0][1][4][1][0]
+ [list_item, ( # [0][1][4][1][1]
+ [compact_paragraph, reference, literal, 'ClassLevel2b'], # [0][1][4][1][1][0]
+ [bullet_list, list_item, compact_paragraph, reference, literal, 'ClassLevel2b.f()'], # [0][1][4][1][1][1][0]
+ )],
+ [list_item, compact_paragraph, reference, literal, 'ClassLevel2a.g()'], # [0][1][4][1][2]
+ [list_item, compact_paragraph, reference, literal, 'ClassLevel2b.g()'], # [0][1][4][1][3]
+ )],
+ )],
+ )],
+ )],
+ )
+
+
@pytest.mark.sphinx('xml', testroot='toctree')
@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
def test_document_toc(app):
diff --git a/tests/test_extensions/test_ext_apidoc.py b/tests/test_extensions/test_ext_apidoc.py
index c3c979f..13c43df 100644
--- a/tests/test_extensions/test_ext_apidoc.py
+++ b/tests/test_extensions/test_ext_apidoc.py
@@ -2,6 +2,7 @@
import os.path
from collections import namedtuple
+from pathlib import Path
import pytest
@@ -9,7 +10,7 @@ import sphinx.ext.apidoc
from sphinx.ext.apidoc import main as apidoc_main
-@pytest.fixture()
+@pytest.fixture
def apidoc(rootdir, tmp_path, apidoc_params):
_, kwargs = apidoc_params
coderoot = rootdir / kwargs.get('coderoot', 'test-root')
@@ -20,7 +21,7 @@ def apidoc(rootdir, tmp_path, apidoc_params):
return namedtuple('apidoc', 'coderoot,outdir')(coderoot, outdir)
-@pytest.fixture()
+@pytest.fixture
def apidoc_params(request):
pargs = {}
kwargs = {}
@@ -661,3 +662,23 @@ def test_no_duplicates(rootdir, tmp_path):
finally:
sphinx.ext.apidoc.PY_SUFFIXES = original_suffixes
+
+
+def test_remove_old_files(tmp_path: Path):
+    """Test that old files are removed when using the --remove-old option.
+
+    Also ensure that pre-existing files are not re-written if unchanged.
+ This is required to avoid unnecessary rebuilds.
+ """
+ module_dir = tmp_path / 'module'
+ module_dir.mkdir()
+ (module_dir / 'example.py').write_text('', encoding='utf8')
+ gen_dir = tmp_path / 'gen'
+ gen_dir.mkdir()
+ (gen_dir / 'other.rst').write_text('', encoding='utf8')
+ apidoc_main(['-o', str(gen_dir), str(module_dir)])
+ assert set(gen_dir.iterdir()) == {gen_dir / 'modules.rst', gen_dir / 'example.rst', gen_dir / 'other.rst'}
+ example_mtime = (gen_dir / 'example.rst').stat().st_mtime
+ apidoc_main(['--remove-old', '-o', str(gen_dir), str(module_dir)])
+ assert set(gen_dir.iterdir()) == {gen_dir / 'modules.rst', gen_dir / 'example.rst'}
+ assert (gen_dir / 'example.rst').stat().st_mtime == example_mtime
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index 54f81f2..e10850b 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -429,7 +429,7 @@ def _assert_getter_works(app, directive, objtype, name, attrs=(), **kw):
hooked_members = {s[1] for s in getattr_spy}
documented_members = {s[1] for s in processed_signatures}
for attr in attrs:
- fullname = '.'.join((name, attr))
+ fullname = f'{name}.{attr}'
assert attr in hooked_members
assert fullname not in documented_members, f'{fullname!r} not intercepted'
@@ -838,7 +838,7 @@ def test_autodoc_special_members(app):
"special-members": None,
}
if sys.version_info >= (3, 13, 0, 'alpha', 5):
- options["exclude-members"] = "__static_attributes__"
+ options["exclude-members"] = "__static_attributes__,__firstlineno__"
actual = do_autodoc(app, 'class', 'target.Class', options)
assert list(filter(lambda l: '::' in l, actual)) == [
'.. py:class:: Class(arg)',
@@ -1479,7 +1479,7 @@ class _EnumFormatter:
return self.entry(name, doc, role='attribute', indent=indent, **rst_options)
-@pytest.fixture()
+@pytest.fixture
def autodoc_enum_options() -> dict[str, object]:
"""Default autodoc options to use when testing enum's documentation."""
return {"members": None, "undoc-members": None}
@@ -2321,18 +2321,62 @@ def test_autodoc_TypeVar(app):
@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_Annotated(app):
- options = {"members": None}
+ options = {'members': None, 'member-order': 'bysource'}
actual = do_autodoc(app, 'module', 'target.annotated', options)
assert list(actual) == [
'',
'.. py:module:: target.annotated',
'',
'',
- '.. py:function:: hello(name: str) -> None',
+ '.. py:class:: FuncValidator(func: function)',
+ ' :module: target.annotated',
+ '',
+ '',
+ '.. py:class:: MaxLen(max_length: int, whitelisted_words: list[str])',
+ ' :module: target.annotated',
+ '',
+ '',
+ '.. py:data:: ValidatedString',
+ ' :module: target.annotated',
+ '',
+ ' Type alias for a validated string.',
+ '',
+ ' alias of :py:class:`~typing.Annotated`\\ [:py:class:`str`, '
+ ':py:class:`~target.annotated.FuncValidator`\\ (func=\\ :py:class:`~target.annotated.validate`)]',
+ '',
+ '',
+ ".. py:function:: hello(name: ~typing.Annotated[str, 'attribute']) -> None",
+ ' :module: target.annotated',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:class:: AnnotatedAttributes()',
' :module: target.annotated',
'',
' docstring',
'',
+ '',
+ ' .. py:attribute:: AnnotatedAttributes.name',
+ ' :module: target.annotated',
+ " :type: ~typing.Annotated[str, 'attribute']",
+ '',
+ ' Docstring about the ``name`` attribute.',
+ '',
+ '',
+ ' .. py:attribute:: AnnotatedAttributes.max_len',
+ ' :module: target.annotated',
+ " :type: list[~typing.Annotated[str, ~target.annotated.MaxLen(max_length=10, whitelisted_words=['word_one', 'word_two'])]]",
+ '',
+ ' Docstring about the ``max_len`` attribute.',
+ '',
+ '',
+ ' .. py:attribute:: AnnotatedAttributes.validated',
+ ' :module: target.annotated',
+ ' :type: ~typing.Annotated[str, ~target.annotated.FuncValidator(func=~target.annotated.validate)]',
+ '',
+ ' Docstring about the ``validated`` attribute.',
+ '',
]
diff --git a/tests/test_extensions/test_ext_autodoc_automodule.py b/tests/test_extensions/test_ext_autodoc_automodule.py
index 92565ae..c6ced7e 100644
--- a/tests/test_extensions/test_ext_autodoc_automodule.py
+++ b/tests/test_extensions/test_ext_autodoc_automodule.py
@@ -4,7 +4,9 @@ This tests mainly the Documenters; the auto directives are tested in a test
source file translated by test_build.
"""
+import inspect
import sys
+import typing
import pytest
@@ -185,8 +187,22 @@ def test_automodule_inherited_members(app):
'sphinx.missing_module4']})
@pytest.mark.usefixtures("rollback_sysmodules")
def test_subclass_of_mocked_object(app):
+ from sphinx.ext.autodoc.mock import _MockObject
sys.modules.pop('target', None) # unload target module to clear the module cache
options = {'members': None}
actual = do_autodoc(app, 'module', 'target.need_mocks', options)
+ # ``typing.Any`` is not available at runtime on ``_MockObject.__new__``
+ assert '.. py:class:: Inherited(*args: Any, **kwargs: Any)' in actual
+
+ # make ``typing.Any`` available at runtime on ``_MockObject.__new__``
+ sig = inspect.signature(_MockObject.__new__)
+ parameters = sig.parameters.copy()
+ for name in ('args', 'kwargs'):
+ parameters[name] = parameters[name].replace(annotation=typing.Any)
+ sig = sig.replace(parameters=tuple(parameters.values()))
+ _MockObject.__new__.__signature__ = sig # type: ignore[attr-defined]
+
+ options = {'members': None}
+ actual = do_autodoc(app, 'module', 'target.need_mocks', options)
assert '.. py:class:: Inherited(*args: ~typing.Any, **kwargs: ~typing.Any)' in actual
diff --git a/tests/test_extensions/test_ext_autodoc_configs.py b/tests/test_extensions/test_ext_autodoc_configs.py
index 6c2af5a..1262b15 100644
--- a/tests/test_extensions/test_ext_autodoc_configs.py
+++ b/tests/test_extensions/test_ext_autodoc_configs.py
@@ -679,6 +679,10 @@ def test_autodoc_typehints_signature(app):
type_o = "~typing.Any | None"
else:
type_o = "~typing.Any"
+ if sys.version_info[:2] >= (3, 13):
+ type_ppp = "pathlib._local.PurePosixPath"
+ else:
+ type_ppp = "pathlib.PurePosixPath"
options = {"members": None,
"undoc-members": None}
@@ -703,7 +707,7 @@ def test_autodoc_typehints_signature(app):
'',
'.. py:data:: CONST3',
' :module: target.typehints',
- ' :type: ~pathlib.PurePosixPath',
+ f' :type: ~{type_ppp}',
" :value: PurePosixPath('/a/b/c')",
'',
' docstring',
@@ -726,7 +730,7 @@ def test_autodoc_typehints_signature(app):
'',
' .. py:attribute:: Math.CONST3',
' :module: target.typehints',
- ' :type: ~pathlib.PurePosixPath',
+ f' :type: ~{type_ppp}',
" :value: PurePosixPath('/a/b/c')",
'',
'',
@@ -748,7 +752,7 @@ def test_autodoc_typehints_signature(app):
'',
' .. py:property:: Math.path',
' :module: target.typehints',
- ' :type: ~pathlib.PurePosixPath',
+ f' :type: ~{type_ppp}',
'',
'',
' .. py:property:: Math.prop',
@@ -773,7 +777,7 @@ def test_autodoc_typehints_signature(app):
'',
' docstring',
'',
- " alias of TypeVar('T', bound=\\ :py:class:`~pathlib.PurePosixPath`)",
+ f" alias of TypeVar('T', bound=\\ :py:class:`~{type_ppp}`)",
'',
'',
'.. py:function:: complex_func(arg1: str, arg2: List[int], arg3: Tuple[int, '
@@ -802,6 +806,10 @@ def test_autodoc_typehints_signature(app):
@pytest.mark.sphinx('html', testroot='ext-autodoc',
confoverrides={'autodoc_typehints': "none"})
def test_autodoc_typehints_none(app):
+ if sys.version_info[:2] >= (3, 13):
+ type_ppp = "pathlib._local.PurePosixPath"
+ else:
+ type_ppp = "pathlib.PurePosixPath"
options = {"members": None,
"undoc-members": None}
actual = do_autodoc(app, 'module', 'target.typehints', options)
@@ -887,7 +895,7 @@ def test_autodoc_typehints_none(app):
'',
' docstring',
'',
- " alias of TypeVar('T', bound=\\ :py:class:`~pathlib.PurePosixPath`)",
+ f" alias of TypeVar('T', bound=\\ :py:class:`~{type_ppp}`)",
'',
'',
'.. py:function:: complex_func(arg1, arg2, arg3=None, *args, **kwargs)',
@@ -1417,7 +1425,10 @@ def test_autodoc_typehints_format_fully_qualified(app):
type_o = "typing.Any | None"
else:
type_o = "typing.Any"
-
+ if sys.version_info[:2] >= (3, 13):
+ type_ppp = "pathlib._local.PurePosixPath"
+ else:
+ type_ppp = "pathlib.PurePosixPath"
options = {"members": None,
"undoc-members": None}
actual = do_autodoc(app, 'module', 'target.typehints', options)
@@ -1441,7 +1452,7 @@ def test_autodoc_typehints_format_fully_qualified(app):
'',
'.. py:data:: CONST3',
' :module: target.typehints',
- ' :type: pathlib.PurePosixPath',
+ f' :type: {type_ppp}',
" :value: PurePosixPath('/a/b/c')",
'',
' docstring',
@@ -1464,7 +1475,7 @@ def test_autodoc_typehints_format_fully_qualified(app):
'',
' .. py:attribute:: Math.CONST3',
' :module: target.typehints',
- ' :type: pathlib.PurePosixPath',
+ f' :type: {type_ppp}',
" :value: PurePosixPath('/a/b/c')",
'',
'',
@@ -1486,7 +1497,7 @@ def test_autodoc_typehints_format_fully_qualified(app):
'',
' .. py:property:: Math.path',
' :module: target.typehints',
- ' :type: pathlib.PurePosixPath',
+ f' :type: {type_ppp}',
'',
'',
' .. py:property:: Math.prop',
@@ -1511,7 +1522,7 @@ def test_autodoc_typehints_format_fully_qualified(app):
'',
' docstring',
'',
- " alias of TypeVar('T', bound=\\ :py:class:`pathlib.PurePosixPath`)",
+ f" alias of TypeVar('T', bound=\\ :py:class:`{type_ppp}`)",
'',
'',
'.. py:function:: complex_func(arg1: str, arg2: List[int], arg3: Tuple[int, '
diff --git a/tests/test_extensions/test_ext_autosummary.py b/tests/test_extensions/test_ext_autosummary.py
index d761978..e3f034c 100644
--- a/tests/test_extensions/test_ext_autosummary.py
+++ b/tests/test_extensions/test_ext_autosummary.py
@@ -506,12 +506,20 @@ def test_autosummary_recursive(app, status, warning):
# Check content of recursively generated stub-files
content = (app.srcdir / 'generated' / 'package.rst').read_text(encoding='utf8')
- assert 'package.module' in content
- assert 'package.package' in content
- assert 'package.module_importfail' in content
+ assert 'module' in content
+ assert 'package' in content
+ assert 'module_importfail' in content
+ # we no longer generate fully-qualified module names.
+ assert 'package.module' not in content
+ assert 'package.package' not in content
+ assert 'package.module_importfail' not in content
content = (app.srcdir / 'generated' / 'package.package.rst').read_text(encoding='utf8')
- assert 'package.package.module' in content
+ assert 'module' in content
+ assert 'package.package.module' not in content
+
+ warnings = app.warning.getvalue()
+ assert 'Summarised items should not include the current module.' not in warnings
@pytest.mark.sphinx('dummy', testroot='ext-autosummary-recursive',
@@ -545,7 +553,7 @@ def test_autosummary_filename_map(app, status, warning):
@pytest.mark.sphinx('latex', **default_kw)
def test_autosummary_latex_table_colspec(app, status, warning):
app.build(force_all=True)
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
print(status.getvalue())
print(warning.getvalue())
assert r'\begin{longtable}{\X{1}{2}\X{1}{2}}' in result
@@ -599,11 +607,11 @@ def test_autosummary_imported_members(app, status, warning):
assert (' .. autosummary::\n'
' \n'
' Bar\n'
- ' \n' in module)
+ ' ' in module)
assert (' .. autosummary::\n'
' \n'
' foo\n'
- ' \n' in module)
+ ' ' in module)
finally:
sys.modules.pop('autosummary_dummy_package', None)
@@ -627,7 +635,7 @@ def test_autosummary_module_all(app, status, warning):
assert ('.. autosummary::\n'
' :toctree:\n'
' :recursive:\n\n'
- ' autosummary_dummy_package_all.extra_dummy_module\n\n' in module)
+ ' extra_dummy_module\n' in module)
finally:
sys.modules.pop('autosummary_dummy_package_all', None)
@@ -684,3 +692,17 @@ def test_autogen(rootdir, tmp_path):
args = ['-o', str(tmp_path), '-t', '.', 'autosummary_templating.txt']
autogen_main(args)
assert (tmp_path / 'sphinx.application.TemplateBridge.rst').exists()
+
+
+def test_autogen_remove_old(rootdir, tmp_path):
+ """Test the ``--remove-old`` option."""
+ tmp_path.joinpath('other.rst').write_text('old content')
+ with chdir(rootdir / 'test-templating'):
+ args = ['-o', str(tmp_path), '-t', '.', 'autosummary_templating.txt']
+ autogen_main(args)
+ assert set(tmp_path.iterdir()) == {
+ tmp_path / 'sphinx.application.TemplateBridge.rst',
+ tmp_path / 'other.rst'
+ }
+ autogen_main([*args, '--remove-old'])
+ assert set(tmp_path.iterdir()) == {tmp_path / 'sphinx.application.TemplateBridge.rst'}
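
The --remove-old behaviour exercised above amounts to pruning stubs that were not (re)generated during the current run. A minimal sketch of that pruning step (assumed logic only; the actual handling lives in sphinx.ext.autosummary.generate, which autogen_main wraps):

from pathlib import Path

def remove_old_stubs(outdir: Path, written: set[Path]) -> None:
    # Assumed sketch: delete any .rst file in the output directory that was
    # not written during this run, mirroring the before/after sets asserted above.
    for stub in outdir.glob('*.rst'):
        if stub not in written:
            stub.unlink()
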
diff --git a/tests/test_extensions/test_ext_autosummary_imports.py b/tests/test_extensions/test_ext_autosummary_imports.py
new file mode 100644
index 0000000..7420c99
--- /dev/null
+++ b/tests/test_extensions/test_ext_autosummary_imports.py
@@ -0,0 +1,49 @@
+"""Test autosummary for import cycles."""
+
+import pytest
+from docutils import nodes
+
+from sphinx import addnodes
+from sphinx.ext.autosummary import autosummary_table
+from sphinx.testing.util import assert_node
+
+
+@pytest.mark.sphinx('dummy', testroot='ext-autosummary-import_cycle')
+@pytest.mark.usefixtures("rollback_sysmodules")
+def test_autosummary_import_cycle(app, warning):
+ app.build()
+
+ doctree = app.env.get_doctree('index')
+ app.env.apply_post_transforms(doctree, 'index')
+
+ assert len(list(doctree.findall(nodes.reference))) == 1
+
+ assert_node(doctree,
+ (addnodes.index, # [0]
+ nodes.target, # [1]
+ nodes.paragraph, # [2]
+ addnodes.tabular_col_spec, # [3]
+ [autosummary_table, nodes.table, nodes.tgroup, (nodes.colspec, # [4][0][0][0]
+ nodes.colspec, # [4][0][0][1]
+ [nodes.tbody, nodes.row])], # [4][0][0][2][1]
+ addnodes.index, # [5]
+ addnodes.desc)) # [6]
+ assert_node(doctree[4][0][0][2][0],
+ ([nodes.entry, nodes.paragraph, (nodes.reference, nodes.Text)], nodes.entry))
+ assert_node(doctree[4][0][0][2][0][0][0][0], nodes.reference,
+ refid='spam.eggs.Ham', reftitle='spam.eggs.Ham')
+
+ expected = (
+ "Summarised items should not include the current module. "
+ "Replace 'spam.eggs.Ham' with 'Ham'."
+ )
+ assert expected in app.warning.getvalue()
+
+
+@pytest.mark.sphinx('dummy', testroot='ext-autosummary-module_prefix')
+@pytest.mark.usefixtures("rollback_sysmodules")
+def test_autosummary_generate_prefixes(app, warning):
+ app.build()
+ warnings = app.warning.getvalue()
+ assert 'Summarised items should not include the current module.' not in warnings
+ assert warnings == ''
diff --git a/tests/test_extensions/test_ext_coverage.py b/tests/test_extensions/test_ext_coverage.py
index c9e9ba9..ed7b5ad 100644
--- a/tests/test_extensions/test_ext_coverage.py
+++ b/tests/test_extensions/test_ext_coverage.py
@@ -10,8 +10,10 @@ def test_build(app, status, warning):
app.build(force_all=True)
py_undoc = (app.outdir / 'python.txt').read_text(encoding='utf8')
- assert py_undoc.startswith('Undocumented Python objects\n'
- '===========================\n')
+ assert py_undoc.startswith(
+ 'Undocumented Python objects\n'
+ '===========================\n',
+ )
assert 'autodoc_target\n--------------\n' in py_undoc
assert ' * Class -- missing methods:\n' in py_undoc
assert ' * raises\n' in py_undoc
@@ -23,8 +25,10 @@ def test_build(app, status, warning):
assert "undocumented py" not in status.getvalue()
c_undoc = (app.outdir / 'c.txt').read_text(encoding='utf8')
- assert c_undoc.startswith('Undocumented C API elements\n'
- '===========================\n')
+ assert c_undoc.startswith(
+ 'Undocumented C API elements\n'
+ '===========================\n',
+ )
assert 'api.h' in c_undoc
assert ' * Py_SphinxTest' in c_undoc
@@ -54,16 +58,26 @@ Undocumented Python objects
Statistics
----------
-+----------------------+----------+--------------+
-| Module | Coverage | Undocumented |
-+======================+==========+==============+
-| coverage_not_ignored | 0.00% | 2 |
-+----------------------+----------+--------------+
-| TOTAL | 0.00% | 2 |
-+----------------------+----------+--------------+
++---------------------------+----------+--------------+
+| Module | Coverage | Undocumented |
++===========================+==========+==============+
+| grog | 100.00% | 0 |
++---------------------------+----------+--------------+
+| grog.coverage_missing | 100.00% | 0 |
++---------------------------+----------+--------------+
+| grog.coverage_not_ignored | 0.00% | 2 |
++---------------------------+----------+--------------+
+| TOTAL | 0.00% | 2 |
++---------------------------+----------+--------------+
+
+grog.coverage_missing
+---------------------
-coverage_not_ignored
---------------------
+Classes:
+ * Missing
+
+grog.coverage_not_ignored
+-------------------------
Classes:
* Documented -- missing methods:
diff --git a/tests/test_extensions/test_ext_graphviz.py b/tests/test_extensions/test_ext_graphviz.py
index 866a92a..cd1fd92 100644
--- a/tests/test_extensions/test_ext_graphviz.py
+++ b/tests/test_extensions/test_ext_graphviz.py
@@ -105,7 +105,7 @@ def test_graphviz_svg_html(app, status, warning):
def test_graphviz_latex(app, status, warning):
app.build(force_all=True)
- content = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ content = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
macro = ('\\\\begin{figure}\\[htbp\\]\n\\\\centering\n\\\\capstart\n\n'
'\\\\sphinxincludegraphics\\[\\]{graphviz-\\w+.pdf}\n'
'\\\\caption{caption of graph}\\\\label{.*}\\\\end{figure}')
diff --git a/tests/test_extensions/test_ext_imgconverter.py b/tests/test_extensions/test_ext_imgconverter.py
index c1d2061..fee6593 100644
--- a/tests/test_extensions/test_ext_imgconverter.py
+++ b/tests/test_extensions/test_ext_imgconverter.py
@@ -5,7 +5,7 @@ import subprocess
import pytest
-@pytest.fixture()
+@pytest.fixture
def _if_converter_found(app):
image_converter = getattr(app.config, 'image_converter', '')
try:
@@ -24,7 +24,7 @@ def _if_converter_found(app):
def test_ext_imgconverter(app, status, warning):
app.build(force_all=True)
- content = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ content = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
# supported image (not converted)
assert '\\sphinxincludegraphics{{img}.pdf}' in content
diff --git a/tests/test_extensions/test_ext_imgmockconverter.py b/tests/test_extensions/test_ext_imgmockconverter.py
index 4c3c64e..c155274 100644
--- a/tests/test_extensions/test_ext_imgmockconverter.py
+++ b/tests/test_extensions/test_ext_imgmockconverter.py
@@ -7,7 +7,7 @@ import pytest
def test_ext_imgmockconverter(app, status, warning):
app.build(force_all=True)
- content = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ content = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
# check identical basenames give distinct files
assert '\\sphinxincludegraphics{{svgimg}.pdf}' in content
diff --git a/tests/test_extensions/test_ext_inheritance_diagram.py b/tests/test_extensions/test_ext_inheritance_diagram.py
index c13ccea..45a5ff0 100644
--- a/tests/test_extensions/test_ext_inheritance_diagram.py
+++ b/tests/test_extensions/test_ext_inheritance_diagram.py
@@ -251,7 +251,7 @@ def test_inheritance_diagram_svg_html(tmp_path, app):
def test_inheritance_diagram_latex(app, status, warning):
app.build(force_all=True)
- content = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ content = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
pattern = ('\\\\begin{figure}\\[htbp]\n\\\\centering\n\\\\capstart\n\n'
'\\\\sphinxincludegraphics\\[\\]{inheritance-\\w+.pdf}\n'
diff --git a/tests/test_extensions/test_ext_intersphinx.py b/tests/test_extensions/test_ext_intersphinx.py
index ef5a9b1..d475c60 100644
--- a/tests/test_extensions/test_ext_intersphinx.py
+++ b/tests/test_extensions/test_ext_intersphinx.py
@@ -7,10 +7,8 @@ import pytest
from docutils import nodes
from sphinx import addnodes
+from sphinx.builders.html import INVENTORY_FILENAME
from sphinx.ext.intersphinx import (
- INVENTORY_FILENAME,
- _get_safe_url,
- _strip_basic_auth,
fetch_inventory,
inspect_main,
load_mappings,
@@ -18,9 +16,14 @@ from sphinx.ext.intersphinx import (
normalize_intersphinx_mapping,
)
from sphinx.ext.intersphinx import setup as intersphinx_setup
+from sphinx.ext.intersphinx._load import _get_safe_url, _strip_basic_auth
from sphinx.util.console import strip_colors
-from tests.test_util.intersphinx_data import INVENTORY_V2, INVENTORY_V2_NO_VERSION
+from tests.test_util.intersphinx_data import (
+ INVENTORY_V2,
+ INVENTORY_V2_AMBIGUOUS_TERMS,
+ INVENTORY_V2_NO_VERSION,
+)
from tests.utils import http_server
@@ -46,8 +49,8 @@ def set_config(app, mapping):
app.config.intersphinx_disabled_reftypes = []
-@mock.patch('sphinx.ext.intersphinx.InventoryFile')
-@mock.patch('sphinx.ext.intersphinx._read_from_url')
+@mock.patch('sphinx.ext.intersphinx._load.InventoryFile')
+@mock.patch('sphinx.ext.intersphinx._load._read_from_url')
def test_fetch_inventory_redirection(_read_from_url, InventoryFile, app, status, warning): # NoQA: PT019
intersphinx_setup(app)
_read_from_url().readline.return_value = b'# Sphinx inventory version 2'
@@ -248,6 +251,24 @@ def test_missing_reference_stddomain(tmp_path, app, status, warning):
assert rn.astext() == 'The Julia Domain'
+def test_ambiguous_reference_warning(tmp_path, app, warning):
+ inv_file = tmp_path / 'inventory'
+ inv_file.write_bytes(INVENTORY_V2_AMBIGUOUS_TERMS)
+ set_config(app, {
+ 'cmd': ('https://docs.python.org/', str(inv_file)),
+ })
+
+ # load the inventory
+ normalize_intersphinx_mapping(app, app.config)
+ load_mappings(app)
+
+ # term reference (case insensitive)
+ node, contnode = fake_node('std', 'term', 'A TERM', 'A TERM')
+ missing_reference(app, app.env, node, contnode)
+
+ assert 'multiple matches found for std:term:A TERM' in warning.getvalue()
+
+
@pytest.mark.sphinx('html', testroot='ext-intersphinx-cppdomain')
def test_missing_reference_cppdomain(tmp_path, app, status, warning):
inv_file = tmp_path / 'inventory'
diff --git a/tests/test_extensions/test_ext_math.py b/tests/test_extensions/test_ext_math.py
index b673f83..80a5ae7 100644
--- a/tests/test_extensions/test_ext_math.py
+++ b/tests/test_extensions/test_ext_math.py
@@ -127,7 +127,7 @@ def test_math_number_all_mathjax(app, status, warning):
def test_math_number_all_latex(app, status, warning):
app.build()
- content = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ content = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
macro = (r'\\begin{equation\*}\s*'
r'\\begin{split}a\^2\+b\^2=c\^2\\end{split}\s*'
r'\\end{equation\*}')
@@ -170,7 +170,7 @@ def test_math_eqref_format_html(app, status, warning):
def test_math_eqref_format_latex(app, status, warning):
app.build(force_all=True)
- content = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ content = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
macro = (r'Referencing equation Eq.\\ref{equation:math:foo} and '
r'Eq.\\ref{equation:math:foo}.')
assert re.search(macro, content, re.DOTALL)
@@ -194,6 +194,24 @@ def test_mathjax_numfig_html(app, status, warning):
@pytest.mark.sphinx('html', testroot='ext-math',
+ confoverrides={'extensions': ['sphinx.ext.mathjax'],
+ 'numfig': True,
+ 'math_numfig': True,
+ 'math_numsep': '-'})
+def test_mathjax_numsep_html(app, status, warning):
+ app.build(force_all=True)
+
+ content = (app.outdir / 'math.html').read_text(encoding='utf8')
+ html = ('<div class="math notranslate nohighlight" id="equation-math-0">\n'
+ '<span class="eqno">(1-2)')
+ assert html in content
+ html = ('<p>Referencing equation <a class="reference internal" '
+ 'href="#equation-foo">(1-1)</a> and '
+ '<a class="reference internal" href="#equation-foo">(1-1)</a>.</p>')
+ assert html in content
+
+
+@pytest.mark.sphinx('html', testroot='ext-math',
confoverrides={'extensions': ['sphinx.ext.imgmath'],
'numfig': True,
'numfig_secnum_depth': 0,
diff --git a/tests/test_extensions/test_ext_todo.py b/tests/test_extensions/test_ext_todo.py
index 1903f9f..5acfcac 100644
--- a/tests/test_extensions/test_ext_todo.py
+++ b/tests/test_extensions/test_ext_todo.py
@@ -89,7 +89,7 @@ def test_todo_valid_link(app, status, warning):
# Ensure the LaTeX output is built.
app.build(force_all=True)
- content = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ content = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
# Look for the link to foo. Note that there are two of them because the
# source document uses todolist twice. We could equally well look for links
diff --git a/tests/test_extensions/test_ext_viewcode.py b/tests/test_extensions/test_ext_viewcode.py
index b2c6fc0..800904a 100644
--- a/tests/test_extensions/test_ext_viewcode.py
+++ b/tests/test_extensions/test_ext_viewcode.py
@@ -42,6 +42,7 @@ def check_viewcode_output(app, warning):
@pytest.mark.sphinx(testroot='ext-viewcode', freshenv=True,
confoverrides={"viewcode_line_numbers": True})
+@pytest.mark.usefixtures("rollback_sysmodules")
def test_viewcode_linenos(app, warning):
shutil.rmtree(app.outdir / '_modules', ignore_errors=True)
app.build(force_all=True)
@@ -52,6 +53,7 @@ def test_viewcode_linenos(app, warning):
@pytest.mark.sphinx(testroot='ext-viewcode', freshenv=True,
confoverrides={"viewcode_line_numbers": False})
+@pytest.mark.usefixtures("rollback_sysmodules")
def test_viewcode(app, warning):
shutil.rmtree(app.outdir / '_modules', ignore_errors=True)
app.build(force_all=True)
@@ -61,6 +63,7 @@ def test_viewcode(app, warning):
@pytest.mark.sphinx('epub', testroot='ext-viewcode')
+@pytest.mark.usefixtures("rollback_sysmodules")
def test_viewcode_epub_default(app, status, warning):
shutil.rmtree(app.outdir)
app.build(force_all=True)
@@ -73,6 +76,7 @@ def test_viewcode_epub_default(app, status, warning):
@pytest.mark.sphinx('epub', testroot='ext-viewcode',
confoverrides={'viewcode_enable_epub': True})
+@pytest.mark.usefixtures("rollback_sysmodules")
def test_viewcode_epub_enabled(app, status, warning):
app.build(force_all=True)
diff --git a/tests/test_intl/test_catalogs.py b/tests/test_intl/test_catalogs.py
index b7fd7be..70c78c5 100644
--- a/tests/test_intl/test_catalogs.py
+++ b/tests/test_intl/test_catalogs.py
@@ -5,7 +5,7 @@ from pathlib import Path
import pytest
-@pytest.fixture()
+@pytest.fixture
def _setup_test(app_params):
assert isinstance(app_params.kwargs['srcdir'], Path)
srcdir = app_params.kwargs['srcdir']
diff --git a/tests/test_intl/test_intl.py b/tests/test_intl/test_intl.py
index 6b1e9ba..e95a78b 100644
--- a/tests/test_intl/test_intl.py
+++ b/tests/test_intl/test_intl.py
@@ -180,8 +180,11 @@ def test_text_inconsistency_warnings(app, warning):
})
assert re.search(expected_warning_expr, warnings), f'{expected_warning_expr!r} did not match {warnings!r}'
+ expected_citation_ref_warning_expr = (
+ '.*/refs_inconsistency.txt:\\d+: WARNING: Citation \\[ref2\\] is not referenced.')
+ assert re.search(expected_citation_ref_warning_expr, warnings), f'{expected_citation_ref_warning_expr!r} did not match {warnings!r}'
+
expected_citation_warning_expr = (
- '.*/refs_inconsistency.txt:\\d+: WARNING: Citation \\[ref2\\] is not referenced.\n' +
'.*/refs_inconsistency.txt:\\d+: WARNING: citation not found: ref3')
assert re.search(expected_citation_warning_expr, warnings), f'{expected_citation_warning_expr!r} did not match {warnings!r}'
@@ -286,7 +289,7 @@ VVV
""")
assert result == expect
warnings = getwarning(warning)
- assert 'term not in glossary' not in warnings
+ assert warnings.count('term not in glossary') == 1
@sphinx_intl
@@ -298,7 +301,8 @@ def test_text_glossary_term_inconsistencies(app, warning):
result = (app.outdir / 'glossary_terms_inconsistency.txt').read_text(encoding='utf8')
expect = ("19. I18N WITH GLOSSARY TERMS INCONSISTENCY"
"\n******************************************\n"
- "\n1. LINK TO *SOME NEW TERM*.\n")
+ "\n1. LINK TO *SOME NEW TERM*.\n"
+ "\n2. LINK TO *TERM NOT IN GLOSSARY*.\n")
assert result == expect
warnings = getwarning(warning)
@@ -308,6 +312,10 @@ def test_text_glossary_term_inconsistencies(app, warning):
" original: \\[':term:`Some term`', ':term:`Some other term`'\\],"
" translated: \\[':term:`SOME NEW TERM`'\\]\n")
assert re.search(expected_warning_expr, warnings), f'{expected_warning_expr!r} did not match {warnings!r}'
+ expected_warning_expr = (
+ '.*/glossary_terms_inconsistency.txt:\\d+:<translated>:1: '
+ "WARNING: term not in glossary: 'TERM NOT IN GLOSSARY'")
+ assert re.search(expected_warning_expr, warnings), f'{expected_warning_expr!r} did not match {warnings!r}'
@sphinx_intl
@@ -729,7 +737,7 @@ class _MockUnixClock(_MockClock):
time.sleep(ds)
-@pytest.fixture()
+@pytest.fixture
def mock_time_and_i18n(
monkeypatch: pytest.MonkeyPatch,
) -> tuple[pytest.MonkeyPatch, _MockClock]:
@@ -930,6 +938,16 @@ def test_html_index_entries(app):
start_tag2 = "<%s[^>]*>" % childtag
return fr"{start_tag1}\s*{keyword}\s*{start_tag2}"
expected_exprs = [
+ wrap('h2', 'Symbols'),
+ wrap('h2', 'C'),
+ wrap('h2', 'E'),
+ wrap('h2', 'F'),
+ wrap('h2', 'M'),
+ wrap('h2', 'N'),
+ wrap('h2', 'R'),
+ wrap('h2', 'S'),
+ wrap('h2', 'T'),
+ wrap('h2', 'V'),
wrap('a', 'NEWSLETTER'),
wrap('a', 'MAILING LIST'),
wrap('a', 'RECIPIENTS LIST'),
@@ -1191,6 +1209,15 @@ def test_xml_role_xref(app):
['i18n-role-xref', 'index',
'glossary_terms#term-Some-term'])
+ sec1_1, = sec1.findall('section')
+ title, = sec1_1.findall('title')
+ assert_elem(
+ title,
+ ['LINK TO', "I18N ROCK'N ROLE XREF", ',', 'CONTENTS', ',',
+ 'SOME NEW TERM', '.'],
+ ['i18n-role-xref', 'index',
+ 'glossary_terms#term-Some-term'])
+
para2 = sec2.findall('paragraph')
assert_elem(
para2[0],
@@ -1231,7 +1258,7 @@ def test_xml_warnings(app, warning):
app.build()
# warnings
warnings = getwarning(warning)
- assert 'term not in glossary' not in warnings
+ assert warnings.count('term not in glossary') == 1
assert 'undefined label' not in warnings
assert 'unknown document' not in warnings
@@ -1294,6 +1321,19 @@ def test_xml_label_targets(app):
@sphinx_intl
+@pytest.mark.sphinx('xml')
+@pytest.mark.test_params(shared_result='test_intl_basic')
+def test_xml_strange_markup(app):
+ app.build()
+ et = etree_parse(app.outdir / 'markup.xml')
+ secs = et.findall('section')
+
+ subsec1, = secs[0].findall('section')
+ title1, = subsec1.findall('title')
+ assert_elem(title1, ['1. TITLE STARTING WITH 1.'])
+
+
+@sphinx_intl
@pytest.mark.sphinx('html')
@pytest.mark.test_params(shared_result='test_intl_basic')
def test_additional_targets_should_not_be_translated(app):
@@ -1377,6 +1417,15 @@ def test_additional_targets_should_be_translated(app):
# [literalblock.txt]
result = (app.outdir / 'literalblock.html').read_text(encoding='utf8')
+ # basic literal block should be translated
+ expected_expr = ('<span class="n">THIS</span> <span class="n">IS</span>\n'
+ '<span class="n">LITERAL</span> <span class="n">BLOCK</span>')
+ assert_count(expected_expr, result, 1)
+
+ # literalinclude should be translated
+ expected_expr = '<span class="s2">&quot;HTTPS://SPHINX-DOC.ORG&quot;</span>'
+ assert_count(expected_expr, result, 1)
+
# title should be translated
expected_expr = 'CODE-BLOCKS'
assert_count(expected_expr, result, 2)
@@ -1411,7 +1460,7 @@ def test_additional_targets_should_be_translated(app):
"""<span class="c1"># SYS IMPORTING</span>""")
assert_count(expected_expr, result, 1)
- # '#noqa' should remain in literal blocks.
+ # 'noqa' comments should remain in literal blocks.
assert_count("#noqa", result, 1)
# [raw.txt]
diff --git a/tests/test_markup/test_markup.py b/tests/test_markup/test_markup.py
index c933481..a23219c 100644
--- a/tests/test_markup/test_markup.py
+++ b/tests/test_markup/test_markup.py
@@ -21,7 +21,7 @@ from sphinx.writers.html import HTML5Translator, HTMLWriter
from sphinx.writers.latex import LaTeXTranslator, LaTeXWriter
-@pytest.fixture()
+@pytest.fixture
def settings(app):
texescape.init() # otherwise done by the latex builder
with warnings.catch_warnings():
@@ -42,7 +42,7 @@ def settings(app):
domain_context.disable()
-@pytest.fixture()
+@pytest.fixture
def new_document(settings):
def create():
document = utils.new_document('test data', settings)
@@ -52,14 +52,14 @@ def new_document(settings):
return create
-@pytest.fixture()
+@pytest.fixture
def inliner(new_document):
document = new_document()
document.reporter.get_source_and_line = lambda line=1: ('dummy.rst', line)
return SimpleNamespace(document=document, reporter=document.reporter)
-@pytest.fixture()
+@pytest.fixture
def parse(new_document):
def parse_(rst):
document = new_document()
@@ -90,7 +90,7 @@ class ForgivingLaTeXTranslator(LaTeXTranslator, ForgivingTranslator):
pass
-@pytest.fixture()
+@pytest.fixture
def verify_re_html(app, parse):
def verify(rst, html_expected):
document = parse(rst)
@@ -102,7 +102,7 @@ def verify_re_html(app, parse):
return verify
-@pytest.fixture()
+@pytest.fixture
def verify_re_latex(app, parse):
def verify(rst, latex_expected):
document = parse(rst)
@@ -117,7 +117,7 @@ def verify_re_latex(app, parse):
return verify
-@pytest.fixture()
+@pytest.fixture
def verify_re(verify_re_html, verify_re_latex):
def verify_re_(rst, html_expected, latex_expected):
if html_expected:
@@ -127,7 +127,7 @@ def verify_re(verify_re_html, verify_re_latex):
return verify_re_
-@pytest.fixture()
+@pytest.fixture
def verify(verify_re_html, verify_re_latex):
def verify_(rst, html_expected, latex_expected):
if html_expected:
@@ -137,7 +137,7 @@ def verify(verify_re_html, verify_re_latex):
return verify_
-@pytest.fixture()
+@pytest.fixture
def get_verifier(verify, verify_re):
v = {
'verify': verify,
diff --git a/tests/test_markup/test_smartquotes.py b/tests/test_markup/test_smartquotes.py
index 6c84386..b35f05f 100644
--- a/tests/test_markup/test_smartquotes.py
+++ b/tests/test_markup/test_smartquotes.py
@@ -41,7 +41,7 @@ def test_text_builder(app, status, warning):
def test_man_builder(app, status, warning):
app.build()
- content = (app.outdir / 'python.1').read_text(encoding='utf8')
+ content = (app.outdir / 'projectnamenotset.1').read_text(encoding='utf8')
assert r'\-\- \(dqSphinx\(dq is a tool that makes it easy ...' in content
@@ -49,7 +49,7 @@ def test_man_builder(app, status, warning):
def test_latex_builder(app, status, warning):
app.build()
- content = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ content = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
assert '\\textendash{} “Sphinx” is a tool that makes it easy …' in content
@@ -94,5 +94,5 @@ def test_smartquotes_excludes_language(app, status, warning):
def test_smartquotes_excludes_builders(app, status, warning):
app.build()
- content = (app.outdir / 'python.1').read_text(encoding='utf8')
+ content = (app.outdir / 'projectnamenotset.1').read_text(encoding='utf8')
assert '– “Sphinx” is a tool that makes it easy …' in content
diff --git a/tests/test_search.py b/tests/test_search.py
index 63443a8..3687911 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -11,6 +11,14 @@ from docutils.parsers import rst
from sphinx.search import IndexBuilder
+from tests.utils import TESTS_ROOT
+
+JAVASCRIPT_TEST_ROOTS = [
+ directory
+ for directory in (TESTS_ROOT / 'js' / 'roots').iterdir()
+ if (directory / 'conf.py').exists()
+]
+
class DummyEnvironment:
def __init__(self, version, domains):
@@ -67,6 +75,9 @@ section_title
.. test that comments are not indexed: boson
+another_title
+=============
+
test that non-comments are indexed: fermion
'''
@@ -164,6 +175,10 @@ def test_IndexBuilder():
'docname2_1': 'title2_1', 'docname2_2': 'title2_2'}
assert index._filenames == {'docname1_1': 'filename1_1', 'docname1_2': 'filename1_2',
'docname2_1': 'filename2_1', 'docname2_2': 'filename2_2'}
+ # note: element iteration order (sort order) is important when the index
+ # is frozen (serialized) during build -- however, the _mapping-related
+ # dictionaries below may be iterated in arbitrary order by Python at
+ # runtime.
assert index._mapping == {
'ar': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
'fermion': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
@@ -172,7 +187,10 @@ def test_IndexBuilder():
'index': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
'test': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
}
- assert index._title_mapping == {'section_titl': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}}
+ assert index._title_mapping == {
+ 'another_titl': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
+ 'section_titl': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
+ }
assert index._objtypes == {}
assert index._objnames == {}
@@ -192,8 +210,14 @@ def test_IndexBuilder():
'non': [0, 1, 2, 3],
'test': [0, 1, 2, 3]},
'titles': ('title1_1', 'title1_2', 'title2_1', 'title2_2'),
- 'titleterms': {'section_titl': [0, 1, 2, 3]},
- 'alltitles': {'section_title': [(0, 'section-title'), (1, 'section-title'), (2, 'section-title'), (3, 'section-title')]},
+ 'titleterms': {
+ 'another_titl': [0, 1, 2, 3],
+ 'section_titl': [0, 1, 2, 3],
+ },
+ 'alltitles': {
+ 'another_title': [(0, 'another-title'), (1, 'another-title'), (2, 'another-title'), (3, 'another-title')],
+ 'section_title': [(0, None), (1, None), (2, None), (3, None)],
+ },
'indexentries': {},
}
assert index._objtypes == {('dummy1', 'objtype1'): 0, ('dummy2', 'objtype1'): 1}
@@ -234,7 +258,10 @@ def test_IndexBuilder():
'index': {'docname1_2', 'docname2_2'},
'test': {'docname1_2', 'docname2_2'},
}
- assert index._title_mapping == {'section_titl': {'docname1_2', 'docname2_2'}}
+ assert index._title_mapping == {
+ 'another_titl': {'docname1_2', 'docname2_2'},
+ 'section_titl': {'docname1_2', 'docname2_2'},
+ }
assert index._objtypes == {('dummy1', 'objtype1'): 0, ('dummy2', 'objtype1'): 1}
assert index._objnames == {0: ('dummy1', 'objtype1', 'objtype1'), 1: ('dummy2', 'objtype1', 'objtype1')}
@@ -253,8 +280,14 @@ def test_IndexBuilder():
'non': [0, 1],
'test': [0, 1]},
'titles': ('title1_2', 'title2_2'),
- 'titleterms': {'section_titl': [0, 1]},
- 'alltitles': {'section_title': [(0, 'section-title'), (1, 'section-title')]},
+ 'titleterms': {
+ 'another_titl': [0, 1],
+ 'section_titl': [0, 1],
+ },
+ 'alltitles': {
+ 'another_title': [(0, 'another-title'), (1, 'another-title')],
+ 'section_title': [(0, None), (1, None)],
+ },
'indexentries': {},
}
assert index._objtypes == {('dummy1', 'objtype1'): 0, ('dummy2', 'objtype1'): 1}
@@ -343,6 +376,19 @@ def assert_is_sorted(item, path: str):
assert_is_sorted(value, f'{path}.{key}')
elif isinstance(item, list):
if not is_title_tuple_type(item) and path not in lists_not_to_sort:
- assert item == sorted(item), f'{err_path} is not sorted'
+ # sort nulls last; http://stackoverflow.com/questions/19868767/
+ assert item == sorted(item, key=lambda x: (x is None, x)), f'{err_path} is not sorted'
for i, child in enumerate(item):
assert_is_sorted(child, f'{path}[{i}]')
+
+
+@pytest.mark.parametrize('directory', JAVASCRIPT_TEST_ROOTS)
+def test_check_js_search_indexes(make_app, sphinx_test_tempdir, directory):
+ app = make_app('html', srcdir=directory, builddir=sphinx_test_tempdir / directory.name)
+ app.build()
+
+ fresh_searchindex = (app.outdir / 'searchindex.js')
+ existing_searchindex = (TESTS_ROOT / 'js' / 'fixtures' / directory.name / 'searchindex.js')
+
+ msg = f"Search index fixture {existing_searchindex} does not match regenerated copy."
+ assert fresh_searchindex.read_bytes() == existing_searchindex.read_bytes(), msg
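
The nulls-last key added to assert_is_sorted above relies on element-wise tuple comparison: (False, value) always orders before (True, value), so None entries sink to the end without ever being compared against non-None values. A standalone sketch:

items = ['beta', None, 'alpha', None]
# None keys become (True, None); everything else becomes (False, value).
assert sorted(items, key=lambda x: (x is None, x)) == ['alpha', 'beta', None, None]
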
diff --git a/tests/test_theming/test_theming.py b/tests/test_theming/test_theming.py
index 867f8a0..680465b 100644
--- a/tests/test_theming/test_theming.py
+++ b/tests/test_theming/test_theming.py
@@ -109,10 +109,15 @@ def test_nested_zipped_theme(app, status, warning):
@pytest.mark.sphinx(testroot='theming', confoverrides={'html_theme': 'staticfiles'})
def test_staticfiles(app, status, warning):
app.build()
+ assert (app.outdir / '_static' / 'legacytmpl.html').exists()
+ assert (app.outdir / '_static' / 'legacytmpl.html').read_text(encoding='utf8') == (
+ '<!-- testing legacy _t static templates -->\n'
+ '<html><project>project name not set</project></html>'
+ )
assert (app.outdir / '_static' / 'staticimg.png').exists()
assert (app.outdir / '_static' / 'statictmpl.html').exists()
assert (app.outdir / '_static' / 'statictmpl.html').read_text(encoding='utf8') == (
- '<!-- testing static templates -->\n<html><project>Python</project></html>'
+ '<!-- testing static templates -->\n<html><project>Project name not set</project></html>'
)
result = (app.outdir / 'index.html').read_text(encoding='utf8')
diff --git a/tests/test_transforms/test_transforms_post_transforms.py b/tests/test_transforms/test_transforms_post_transforms.py
index c4e699b..4bd446b 100644
--- a/tests/test_transforms/test_transforms_post_transforms.py
+++ b/tests/test_transforms/test_transforms_post_transforms.py
@@ -89,7 +89,7 @@ class TestSigElementFallbackTransform:
"""Fixture returning an ordered view on the original value of :data:`!sphinx.addnodes.SIG_ELEMENTS`."""
return self._builtin_sig_elements
- @pytest.fixture()
+ @pytest.fixture
def document(
self, app: SphinxTestApp, builtin_sig_elements: tuple[type[addnodes.desc_sig_element], ...],
) -> nodes.document:
@@ -103,13 +103,13 @@ class TestSigElementFallbackTransform:
doc += addnodes.desc_inline('py')
return doc
- @pytest.fixture()
+ @pytest.fixture
def with_desc_sig_elements(self, value: Any) -> bool:
"""Dynamic fixture acting as the identity on booleans."""
assert isinstance(value, bool)
return value
- @pytest.fixture()
+ @pytest.fixture
def add_visitor_method_for(self, value: Any) -> list[str]:
"""Dynamic fixture acting as the identity on a list of strings."""
assert isinstance(value, list)
diff --git a/tests/test_transforms/test_transforms_post_transforms_code.py b/tests/test_transforms/test_transforms_post_transforms_code.py
index 4423d5b..96d5a0c 100644
--- a/tests/test_transforms/test_transforms_post_transforms_code.py
+++ b/tests/test_transforms/test_transforms_post_transforms_code.py
@@ -34,7 +34,7 @@ def test_trim_doctest_flags_disabled(app, status, warning):
def test_trim_doctest_flags_latex(app, status, warning):
app.build()
- result = (app.outdir / 'python.tex').read_text(encoding='utf8')
+ result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
assert 'FOO' not in result
assert 'BAR' in result
assert 'BAZ' not in result
diff --git a/tests/test_transforms/test_transforms_post_transforms_images.py b/tests/test_transforms/test_transforms_post_transforms_images.py
new file mode 100644
index 0000000..bb5d076
--- /dev/null
+++ b/tests/test_transforms/test_transforms_post_transforms_images.py
@@ -0,0 +1,46 @@
+from types import SimpleNamespace
+
+from docutils import nodes
+
+from sphinx.transforms.post_transforms.images import ImageConverter
+from sphinx.util.docutils import new_document
+
+WEBP_DATA = (
+ b'RIFF\xa8\x01\x00\x00WEBPVP8X\n\x00\x00\x00'
+ b"\x10\x00\x00\x00\x0f\x00\x00\x0f\x00\x00ALPH\xc3\x00\x00\x00\x01'"
+ b'\xa2\xa8\x91$\xe5z\xe7\x18_\xe7\xdf*\x99\x88\x98\xfftq\x8d\xe0'
+ b'&0\xe2\xe1\x8bw2\xc8\xc1\x11\\\x83+0\xe8\xb0x\x15\x8ex'
+ b'Q5\xc1\x08\x0c\x02O\x92\xa0j\xb0U\x19\x1c\xd6\xb6mF/N'
+ b'\xc6v<\xb6\xedw\xfb\xaf)\xae!\xa2\xffI\xd1\xfd\x8f\x90\xf7\xba'
+ b'DI$\x1b:%\x914\xf3\x14m\x0e\xc7\xd3\xe5\x16 \xf4\x0b\x14'
+ b'\xbe\x90\xe1\x83\xb7\x1a2\x9e6\x82\x7f\x1d)~Nv\x08\xfb\x88\x9e'
+ b'\xb3\x91\xef\x99sF\xe82\x82\xdb\xf8\xccH\xb2\xf7E0} \xfd'
+ b'6\x17\x8c!2V-\xa5\xd6k#\xbc]\xe3\xa5Y\x15\xd5\x9c\x81'
+ b'\xa4\xd9n\x96u\x8a\x181\x0f\x8a\xaa,P4\xfa0\x82\xdf\xbak'
+ b'PR)\xb5-\xcf\xe9T\x14\n\x01\x00\x00\x00VP8 \xbe\x00'
+ b'\x00\x00\x90\x02\x00\x9d\x01*\x10\x00\x10\x00\x03\x004%\xb0\x02t0'
+ b'O\x08\x85\x0c|\x03\x1d\x08,\xfd\xe8\x00\xfe\xfdt\xa0\xfd\x02\x9b\x1f'
+ b'\x8a\xf7C|\x9c7\xf6\xd2\x0c\xaf\xd3\xff5h\xe2\xee\xa7\xbd\xc9o'
+ b'\x1b\xf4\xaa\xc5c\xae\xba\x9f\x97\x84\xdfA\xa2;\xda[\xe4\xef\xf8\xcb'
+ b'\xf1\xbd\x7f\xe1\xaf\xfa?\xe5\t\xec\xf4\xbbf_\xff\xaa)\xd9\x7f\xc9'
+ b'l\xe7\x86\xe6\xac\x97\xb9\xe4\xc6\xf4\x93#\x8c_\xdd\x8f9U \x7f'
+ b'\x95O\xfc9\xf8\xffo\xd2k\x03\xe8\x9f\xbc\x83\x98fm\xb1\xd5\x13'
+ b'\xffv\x17\xe6\xb1\xfe]\x8a\xe4\x9fG\xbf\xb3\xfa\xbf\xfe\x1d\x1d\xf3\x12'
+ b'\x8f\xfe\\\xcf\xc1\xfa\xf9\x18\xc3\xbd\xcf\xcf\x1f\x919\xa0\x01\xfd\x9a\x01'
+ b'K1,\xde\xbc\xd9{\xaa\xac\x00\x00\x00'
+)
+
+
+def test_guess_mimetype_webp(tmp_path):
+ document = new_document('<source>')
+ document.settings.env = SimpleNamespace(app=SimpleNamespace(srcdir=tmp_path))
+ converter = ImageConverter(document)
+
+ file_webp = 'webp-image.webp'
+ image = nodes.image(uri=file_webp, candidates={'*': file_webp})
+ assert converter.guess_mimetypes(image) == ['image/webp']
+
+ file_dat = 'webp-image.dat'
+ tmp_path.joinpath(file_dat).write_bytes(WEBP_DATA)
+ image = nodes.image(uri=file_dat, candidates={'*': file_dat})
+ assert converter.guess_mimetypes(image) == ['image/webp']
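
The WEBP_DATA fixture above is recognisable from its container header alone: a WebP file is a RIFF chunk whose format tag at byte offset 8 is 'WEBP'. A minimal sketch of that check (assumed detection logic; the converter's real implementation may differ):

def looks_like_webp(data: bytes) -> bool:
    # b'RIFF' <4-byte size> b'WEBP' ... is the WebP container signature.
    return len(data) >= 12 and data[:4] == b'RIFF' and data[8:12] == b'WEBP'

assert looks_like_webp(WEBP_DATA)  # WEBP_DATA is the fixture defined above
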
diff --git a/tests/test_util/intersphinx_data.py b/tests/test_util/intersphinx_data.py
index 042ee76..95cf80a 100644
--- a/tests/test_util/intersphinx_data.py
+++ b/tests/test_util/intersphinx_data.py
@@ -50,3 +50,15 @@ INVENTORY_V2_NO_VERSION: Final[bytes] = b'''\
''' + zlib.compress(b'''\
module1 py:module 0 foo.html#module-module1 Long Module desc
''')
+
+INVENTORY_V2_AMBIGUOUS_TERMS: Final[bytes] = b'''\
+# Sphinx inventory version 2
+# Project: foo
+# Version: 2.0
+# The remainder of this file is compressed with zlib.
+''' + zlib.compress(b'''\
+a term std:term -1 glossary.html#term-a-term -
+A term std:term -1 glossary.html#term-a-term -
+b term std:term -1 document.html#id5 -
+B term std:term -1 document.html#B -
+''')
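
INVENTORY_V2_AMBIGUOUS_TERMS deliberately contains entries that differ only in case; std:term lookups are case-insensitive, which is what produces the 'multiple matches found' warning checked in test_ambiguous_reference_warning. A minimal sketch of the collision (hypothetical data layout, not the real inventory structure):

entries = {
    'a term': 'glossary.html#term-a-term',
    'A term': 'glossary.html#term-a-term',
}
target = 'A TERM'
# Case-insensitive matching collapses both keys onto the same lookup value.
matches = [name for name in entries if name.lower() == target.lower()]
assert len(matches) == 2  # ambiguous -> "multiple matches found for std:term:A TERM"
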
diff --git a/tests/test_util/test_util_docutils_sphinx_directive.py b/tests/test_util/test_util_docutils_sphinx_directive.py
new file mode 100644
index 0000000..8f5ab3f
--- /dev/null
+++ b/tests/test_util/test_util_docutils_sphinx_directive.py
@@ -0,0 +1,139 @@
+from __future__ import annotations
+
+from types import SimpleNamespace
+
+from docutils import nodes
+from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined]
+from docutils.parsers.rst.states import Inliner, RSTState, RSTStateMachine, state_classes
+from docutils.statemachine import StringList
+
+from sphinx.util.docutils import SphinxDirective, new_document
+
+
+def make_directive(*, env: SimpleNamespace, input_lines: StringList | None = None) -> SphinxDirective:
+ _, directive = make_directive_and_state(env=env, input_lines=input_lines)
+ return directive
+
+
+def make_directive_and_state(*, env: SimpleNamespace, input_lines: StringList | None = None) -> tuple[RSTState, SphinxDirective]:
+ sm = RSTStateMachine(state_classes, initial_state='Body')
+ sm.reporter = object()
+ if input_lines is not None:
+ sm.input_lines = input_lines
+ state = RSTState(sm)
+ state.document = new_document('<tests>')
+ state.document.settings.env = env
+ state.document.settings.tab_width = 4
+ state.document.settings.pep_references = None
+ state.document.settings.rfc_references = None
+ inliner = Inliner()
+ inliner.init_customizations(state.document.settings)
+ state.inliner = inliner
+ state.parent = None
+ state.memo = SimpleNamespace(
+ document=state.document,
+ language=english,
+ inliner=state.inliner,
+ reporter=state.document.reporter,
+ section_level=0,
+ title_styles=[],
+ )
+ directive = SphinxDirective(
+ name='test_directive',
+ arguments=[],
+ options={},
+ content=StringList(),
+ lineno=0,
+ content_offset=0,
+ block_text='',
+ state=state,
+ state_machine=state.state_machine,
+ )
+ return state, directive
+
+
+def test_sphinx_directive_env():
+ state, directive = make_directive_and_state(env=SimpleNamespace())
+
+ assert hasattr(directive, 'env')
+ assert directive.env is state.document.settings.env
+
+
+def test_sphinx_directive_config():
+ env = SimpleNamespace(config=object())
+ state, directive = make_directive_and_state(env=env)
+
+ assert hasattr(directive, 'config')
+ assert directive.config is directive.env.config
+ assert directive.config is state.document.settings.env.config
+
+
+def test_sphinx_directive_get_source_info():
+ env = SimpleNamespace()
+ input_lines = StringList(['spam'], source='<source>')
+ directive = make_directive(env=env, input_lines=input_lines)
+
+ assert directive.get_source_info() == ('<source>', 1)
+
+
+def test_sphinx_directive_set_source_info():
+ env = SimpleNamespace()
+ input_lines = StringList(['spam'], source='<source>')
+ directive = make_directive(env=env, input_lines=input_lines)
+
+ node = nodes.Element()
+ directive.set_source_info(node)
+ assert node.source == '<source>'
+ assert node.line == 1
+
+
+def test_sphinx_directive_get_location():
+ env = SimpleNamespace()
+ input_lines = StringList(['spam'], source='<source>')
+ directive = make_directive(env=env, input_lines=input_lines)
+
+ assert directive.get_location() == '<source>:1'
+
+
+def test_sphinx_directive_parse_content_to_nodes():
+ directive = make_directive(env=SimpleNamespace())
+ content = 'spam\n====\n\nEggs! *Lobster thermidor.*'
+ directive.content = StringList(content.split('\n'), source='<source>')
+
+ parsed = directive.parse_content_to_nodes(allow_section_headings=True)
+ assert len(parsed) == 1
+ node = parsed[0]
+ assert isinstance(node, nodes.section)
+ assert len(node.children) == 2
+ assert isinstance(node.children[0], nodes.title)
+ assert node.children[0].astext() == 'spam'
+ assert isinstance(node.children[1], nodes.paragraph)
+ assert node.children[1].astext() == 'Eggs! Lobster thermidor.'
+
+
+def test_sphinx_directive_parse_text_to_nodes():
+ directive = make_directive(env=SimpleNamespace())
+ content = 'spam\n====\n\nEggs! *Lobster thermidor.*'
+
+ parsed = directive.parse_text_to_nodes(content, allow_section_headings=True)
+ assert len(parsed) == 1
+ node = parsed[0]
+ assert isinstance(node, nodes.section)
+ assert len(node.children) == 2
+ assert isinstance(node.children[0], nodes.title)
+ assert node.children[0].astext() == 'spam'
+ assert isinstance(node.children[1], nodes.paragraph)
+ assert node.children[1].astext() == 'Eggs! Lobster thermidor.'
+
+
+def test_sphinx_directive_parse_inline():
+ directive = make_directive(env=SimpleNamespace())
+ content = 'Eggs! *Lobster thermidor.*'
+
+ parsed, messages = directive.parse_inline(content)
+ assert len(parsed) == 2
+ assert messages == []
+ assert parsed[0] == nodes.Text('Eggs! ')
+ assert isinstance(parsed[1], nodes.emphasis)
+ assert parsed[1].rawsource == '*Lobster thermidor.*'
+ assert parsed[1][0] == nodes.Text('Lobster thermidor.')
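
The helpers exercised by this new test module (parse_text_to_nodes, parse_content_to_nodes, parse_inline) are intended to be called from a directive's run(). A minimal sketch of such a caller (hypothetical directive, not part of the test suite):

from docutils import nodes

from sphinx.util.docutils import SphinxDirective


class HelloDirective(SphinxDirective):
    def run(self) -> list[nodes.Node]:
        # Returns fully parsed docutils nodes, as asserted in
        # test_sphinx_directive_parse_text_to_nodes above.
        return self.parse_text_to_nodes('Hello, *world*!')
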
diff --git a/tests/test_util/test_util_fileutil.py b/tests/test_util/test_util_fileutil.py
index 9c23821..2071fc3 100644
--- a/tests/test_util/test_util_fileutil.py
+++ b/tests/test_util/test_util_fileutil.py
@@ -2,8 +2,11 @@
from unittest import mock
+import pytest
+
from sphinx.jinja2glue import BuiltinTemplateLoader
-from sphinx.util.fileutil import copy_asset, copy_asset_file
+from sphinx.util import strip_colors
+from sphinx.util.fileutil import _template_basename, copy_asset, copy_asset_file
class DummyTemplateLoader(BuiltinTemplateLoader):
@@ -28,9 +31,9 @@ def test_copy_asset_file(tmp_path):
assert src.read_text(encoding='utf8') == dest.read_text(encoding='utf8')
# copy template file
- src = (tmp_path / 'asset.txt_t')
+ src = (tmp_path / 'asset.txt.jinja')
src.write_text('# {{var1}} data', encoding='utf8')
- dest = (tmp_path / 'output.txt_t')
+ dest = (tmp_path / 'output.txt.jinja')
copy_asset_file(str(src), str(dest), {'var1': 'template'}, renderer)
assert not dest.exists()
@@ -38,7 +41,7 @@ def test_copy_asset_file(tmp_path):
assert (tmp_path / 'output.txt').read_text(encoding='utf8') == '# template data'
# copy template file to subdir
- src = (tmp_path / 'asset.txt_t')
+ src = (tmp_path / 'asset.txt.jinja')
src.write_text('# {{var1}} data', encoding='utf8')
subdir1 = (tmp_path / 'subdir')
subdir1.mkdir(parents=True, exist_ok=True)
@@ -48,14 +51,14 @@ def test_copy_asset_file(tmp_path):
assert (subdir1 / 'asset.txt').read_text(encoding='utf8') == '# template data'
# copy template file without context
- src = (tmp_path / 'asset.txt_t')
+ src = (tmp_path / 'asset.txt.jinja')
subdir2 = (tmp_path / 'subdir2')
subdir2.mkdir(parents=True, exist_ok=True)
copy_asset_file(src, subdir2)
assert not (subdir2 / 'asset.txt').exists()
- assert (subdir2 / 'asset.txt_t').exists()
- assert (subdir2 / 'asset.txt_t').read_text(encoding='utf8') == '# {{var1}} data'
+ assert (subdir2 / 'asset.txt.jinja').exists()
+ assert (subdir2 / 'asset.txt.jinja').read_text(encoding='utf8') == '# {{var1}} data'
def test_copy_asset(tmp_path):
@@ -65,12 +68,12 @@ def test_copy_asset(tmp_path):
source = (tmp_path / 'source')
source.mkdir(parents=True, exist_ok=True)
(source / 'index.rst').write_text('index.rst', encoding='utf8')
- (source / 'foo.rst_t').write_text('{{var1}}.rst', encoding='utf8')
+ (source / 'foo.rst.jinja').write_text('{{var1}}.rst', encoding='utf8')
(source / '_static').mkdir(parents=True, exist_ok=True)
(source / '_static' / 'basic.css').write_text('basic.css', encoding='utf8')
(source / '_templates').mkdir(parents=True, exist_ok=True)
(source / '_templates' / 'layout.html').write_text('layout.html', encoding='utf8')
- (source / '_templates' / 'sidebar.html_t').write_text('sidebar: {{var2}}', encoding='utf8')
+ (source / '_templates' / 'sidebar.html.jinja').write_text('sidebar: {{var2}}', encoding='utf8')
# copy a single file
assert not (tmp_path / 'test1').exists()
@@ -101,3 +104,25 @@ def test_copy_asset(tmp_path):
assert not (destdir / '_static' / 'basic.css').exists()
assert (destdir / '_templates' / 'layout.html').exists()
assert not (destdir / '_templates' / 'sidebar.html').exists()
+
+
+@pytest.mark.sphinx('html', testroot='util-copyasset_overwrite')
+def test_copy_asset_overwrite(app):
+ app.build()
+ src = app.srcdir / 'myext_static' / 'custom-styles.css'
+ dst = app.outdir / '_static' / 'custom-styles.css'
+ assert (
+ f'Copying the source path {src} to {dst} will overwrite data, '
+ 'as a file already exists at the destination path '
+ 'and the content does not match.\n'
+ ) in strip_colors(app.status.getvalue())
+
+
+def test_template_basename():
+ assert _template_basename('asset.txt') is None
+ assert _template_basename('asset.txt.jinja') == 'asset.txt'
+ assert _template_basename('sidebar.html.jinja') == 'sidebar.html'
+
+
+def test_legacy_template_basename():
+ assert _template_basename('asset.txt_t') == 'asset.txt'
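
As asserted above, _template_basename (imported from sphinx.util.fileutil) maps template filenames to their rendered names and returns None for non-templates. A minimal sketch of those semantics (assumed; the real helper may differ in detail):

from __future__ import annotations

def template_basename(filename: str) -> str | None:
    # '.jinja' is the current template suffix; '_t' is the legacy one.
    if filename.endswith('.jinja'):
        return filename.removesuffix('.jinja')
    if filename.endswith('_t'):
        return filename.removesuffix('_t')
    return None

assert template_basename('sidebar.html.jinja') == 'sidebar.html'
assert template_basename('asset.txt') is None
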
diff --git a/tests/test_util/test_util_i18n.py b/tests/test_util/test_util_i18n.py
index f6baa04..f2f3249 100644
--- a/tests/test_util/test_util_i18n.py
+++ b/tests/test_util/test_util_i18n.py
@@ -75,16 +75,10 @@ def test_format_date():
format = '%x'
assert i18n.format_date(format, date=datet, language='en') == 'Feb 7, 2016'
format = '%X'
- if BABEL_VERSION >= (2, 12):
- assert i18n.format_date(format, date=datet, language='en') == '5:11:17\u202fAM'
- else:
- assert i18n.format_date(format, date=datet, language='en') == '5:11:17 AM'
+ assert i18n.format_date(format, date=datet, language='en') == '5:11:17\u202fAM'
assert i18n.format_date(format, date=date, language='en') == 'Feb 7, 2016'
format = '%c'
- if BABEL_VERSION >= (2, 12):
- assert i18n.format_date(format, date=datet, language='en') == 'Feb 7, 2016, 5:11:17\u202fAM'
- else:
- assert i18n.format_date(format, date=datet, language='en') == 'Feb 7, 2016, 5:11:17 AM'
+ assert i18n.format_date(format, date=datet, language='en') == 'Feb 7, 2016, 5:11:17\u202fAM'
assert i18n.format_date(format, date=date, language='en') == 'Feb 7, 2016'
# timezone
diff --git a/tests/test_util/test_util_inspect.py b/tests/test_util/test_util_inspect.py
index 32840b8..764ca20 100644
--- a/tests/test_util/test_util_inspect.py
+++ b/tests/test_util/test_util_inspect.py
@@ -359,6 +359,10 @@ def test_signature_annotations():
sig = inspect.signature(mod.f25)
assert stringify_signature(sig) == '(a, b, /)'
+ # collapse Literal types
+ sig = inspect.signature(mod.f26)
+ assert stringify_signature(sig) == "(x: typing.Literal[1, 2, 3] = 1, y: typing.Literal['a', 'b'] = 'a') -> None"
+
def test_signature_from_str_basic():
signature = '(a, b, *args, c=0, d="blah", **kwargs)'
@@ -662,7 +666,7 @@ def test_getslots():
__slots__ = {'attr': 'docstring'}
class Qux:
- __slots__ = 'attr'
+ __slots__ = 'attr' # NoQA: PLC0205
assert inspect.getslots(Foo) is None
assert inspect.getslots(Bar) == {'attr': None}
diff --git a/tests/test_util/test_util_inventory.py b/tests/test_util/test_util_inventory.py
index 81d31b0..211dc17 100644
--- a/tests/test_util/test_util_inventory.py
+++ b/tests/test_util/test_util_inventory.py
@@ -10,6 +10,7 @@ from sphinx.util.inventory import InventoryFile
from tests.test_util.intersphinx_data import (
INVENTORY_V1,
INVENTORY_V2,
+ INVENTORY_V2_AMBIGUOUS_TERMS,
INVENTORY_V2_NO_VERSION,
)
@@ -48,6 +49,24 @@ def test_read_inventory_v2_not_having_version():
('foo', '', '/util/foo.html#module-module1', 'Long Module desc')
+def test_ambiguous_definition_warning(warning, status):
+ f = BytesIO(INVENTORY_V2_AMBIGUOUS_TERMS)
+ InventoryFile.load(f, '/util', posixpath.join)
+
+ def _multiple_defs_notice_for(entity: str) -> str:
+ return f'contains multiple definitions for {entity}'
+
+ # was warning-level; reduced to info-level; see https://github.com/sphinx-doc/sphinx/issues/12613
+ mult_defs_a, mult_defs_b = (
+ _multiple_defs_notice_for('std:term:a'),
+ _multiple_defs_notice_for('std:term:b'),
+ )
+ assert mult_defs_a not in warning.getvalue().lower()
+ assert mult_defs_a not in status.getvalue().lower()
+ assert mult_defs_b not in warning.getvalue().lower()
+ assert mult_defs_b in status.getvalue().lower()
+
+
def _write_appconfig(dir, language, prefix=None):
prefix = prefix or language
os.makedirs(dir / prefix, exist_ok=True)
diff --git a/tests/test_util/test_util_typing.py b/tests/test_util/test_util_typing.py
index 9c28029..956cffe 100644
--- a/tests/test_util/test_util_typing.py
+++ b/tests/test_util/test_util_typing.py
@@ -1,6 +1,9 @@
"""Tests util.typing functions."""
+import dataclasses
import sys
+import typing as t
+from collections import abc
from contextvars import Context, ContextVar, Token
from enum import Enum
from numbers import Integral
@@ -28,12 +31,12 @@ from types import (
WrapperDescriptorType,
)
from typing import (
+ Annotated,
Any,
- Callable,
Dict,
- Generator,
- Iterator,
+ ForwardRef,
List,
+ Literal,
NewType,
Optional,
Tuple,
@@ -71,6 +74,11 @@ class BrokenType:
__args__ = int
+@dataclasses.dataclass(frozen=True)
+class Gt:
+ gt: float
+
+
def test_restify():
assert restify(int) == ":py:class:`int`"
assert restify(int, "smart") == ":py:class:`int`"
@@ -173,20 +181,36 @@ def test_restify_type_hints_containers():
assert restify(MyList[Tuple[int, int]]) == (":py:class:`tests.test_util.test_util_typing.MyList`\\ "
"[:py:class:`~typing.Tuple`\\ "
"[:py:class:`int`, :py:class:`int`]]")
- assert restify(Generator[None, None, None]) == (":py:class:`~typing.Generator`\\ "
- "[:py:obj:`None`, :py:obj:`None`, "
- ":py:obj:`None`]")
- assert restify(Iterator[None]) == (":py:class:`~typing.Iterator`\\ "
- "[:py:obj:`None`]")
+ assert restify(t.Generator[None, None, None]) == (":py:class:`~typing.Generator`\\ "
+ "[:py:obj:`None`, :py:obj:`None`, "
+ ":py:obj:`None`]")
+ assert restify(abc.Generator[None, None, None]) == (":py:class:`collections.abc.Generator`\\ "
+ "[:py:obj:`None`, :py:obj:`None`, "
+ ":py:obj:`None`]")
+ assert restify(t.Iterator[None]) == (":py:class:`~typing.Iterator`\\ "
+ "[:py:obj:`None`]")
+ assert restify(abc.Iterator[None]) == (":py:class:`collections.abc.Iterator`\\ "
+ "[:py:obj:`None`]")
-def test_restify_type_hints_Callable():
- assert restify(Callable) == ":py:class:`~typing.Callable`"
+def test_restify_Annotated():
+ assert restify(Annotated[str, "foo", "bar"]) == ":py:class:`~typing.Annotated`\\ [:py:class:`str`, 'foo', 'bar']"
+ assert restify(Annotated[str, "foo", "bar"], 'smart') == ":py:class:`~typing.Annotated`\\ [:py:class:`str`, 'foo', 'bar']"
+ assert restify(Annotated[float, Gt(-10.0)]) == ':py:class:`~typing.Annotated`\\ [:py:class:`float`, :py:class:`tests.test_util.test_util_typing.Gt`\\ (gt=\\ -10.0)]'
+ assert restify(Annotated[float, Gt(-10.0)], 'smart') == ':py:class:`~typing.Annotated`\\ [:py:class:`float`, :py:class:`~tests.test_util.test_util_typing.Gt`\\ (gt=\\ -10.0)]'
- assert restify(Callable[[str], int]) == (":py:class:`~typing.Callable`\\ "
- "[[:py:class:`str`], :py:class:`int`]")
- assert restify(Callable[..., int]) == (":py:class:`~typing.Callable`\\ "
- "[[...], :py:class:`int`]")
+
+def test_restify_type_hints_Callable():
+ assert restify(t.Callable) == ":py:class:`~typing.Callable`"
+ assert restify(t.Callable[[str], int]) == (":py:class:`~typing.Callable`\\ "
+ "[[:py:class:`str`], :py:class:`int`]")
+ assert restify(t.Callable[..., int]) == (":py:class:`~typing.Callable`\\ "
+ "[[...], :py:class:`int`]")
+ assert restify(abc.Callable) == ":py:class:`collections.abc.Callable`"
+ assert restify(abc.Callable[[str], int]) == (":py:class:`collections.abc.Callable`\\ "
+ "[[:py:class:`str`], :py:class:`int`]")
+ assert restify(abc.Callable[..., int]) == (":py:class:`collections.abc.Callable`\\ "
+ "[[...], :py:class:`int`]")
def test_restify_type_hints_Union():
@@ -276,7 +300,6 @@ def test_restify_type_hints_alias():
def test_restify_type_ForwardRef():
- from typing import ForwardRef # type: ignore[attr-defined]
assert restify(ForwardRef("MyInt")) == ":py:class:`MyInt`"
assert restify(list[ForwardRef("MyInt")]) == ":py:class:`list`\\ [:py:class:`MyInt`]"
@@ -285,7 +308,6 @@ def test_restify_type_ForwardRef():
def test_restify_type_Literal():
- from typing import Literal # type: ignore[attr-defined]
assert restify(Literal[1, "2", "\r"]) == ":py:obj:`~typing.Literal`\\ [1, '2', '\\r']"
assert restify(Literal[MyEnum.a], 'fully-qualified-except-typing') == ':py:obj:`~typing.Literal`\\ [:py:attr:`tests.test_util.test_util_typing.MyEnum.a`]'
@@ -317,6 +339,30 @@ def test_restify_pep_585():
":py:class:`int`]")
+def test_restify_Unpack():
+ from typing_extensions import Unpack as UnpackCompat
+
+ class X(t.TypedDict):
+ x: int
+ y: int
+ label: str
+
+ # Unpack is considered a typing special form, so we always have '~'
+ if sys.version_info[:2] >= (3, 12):
+ expect = r':py:obj:`~typing.Unpack`\ [:py:class:`X`]'
+ assert restify(UnpackCompat['X'], 'fully-qualified-except-typing') == expect
+ assert restify(UnpackCompat['X'], 'smart') == expect
+ else:
+ expect = r':py:obj:`~typing_extensions.Unpack`\ [:py:class:`X`]'
+ assert restify(UnpackCompat['X'], 'fully-qualified-except-typing') == expect
+ assert restify(UnpackCompat['X'], 'smart') == expect
+
+ if sys.version_info[:2] >= (3, 11):
+ expect = r':py:obj:`~typing.Unpack`\ [:py:class:`X`]'
+ assert restify(t.Unpack['X'], 'fully-qualified-except-typing') == expect
+ assert restify(t.Unpack['X'], 'smart') == expect
+
+
@pytest.mark.skipif(sys.version_info[:2] <= (3, 9), reason='python 3.10+ is required.')
def test_restify_type_union_operator():
assert restify(int | None) == ":py:class:`int` | :py:obj:`None`" # type: ignore[attr-defined]
@@ -339,6 +385,21 @@ def test_restify_mock():
assert restify(unknown.secret.Class, "smart") == ':py:class:`~unknown.secret.Class`'
+@pytest.mark.xfail(sys.version_info[:2] <= (3, 9), reason='ParamSpec not supported in Python 3.9.')
+def test_restify_type_hints_paramspec():
+ from typing import ParamSpec
+ P = ParamSpec('P')
+
+ assert restify(P) == ":py:obj:`tests.test_util.test_util_typing.P`"
+ assert restify(P, "smart") == ":py:obj:`~tests.test_util.test_util_typing.P`"
+
+ assert restify(P.args) == "P.args"
+ assert restify(P.args, "smart") == "P.args"
+
+ assert restify(P.kwargs) == "P.kwargs"
+ assert restify(P.kwargs, "smart") == "P.kwargs"
+
+
def test_stringify_annotation():
assert stringify_annotation(int, 'fully-qualified-except-typing') == "int"
assert stringify_annotation(int, "smart") == "int"
@@ -409,13 +470,21 @@ def test_stringify_type_hints_containers():
assert stringify_annotation(MyList[Tuple[int, int]], "fully-qualified") == "tests.test_util.test_util_typing.MyList[typing.Tuple[int, int]]"
assert stringify_annotation(MyList[Tuple[int, int]], "smart") == "~tests.test_util.test_util_typing.MyList[~typing.Tuple[int, int]]"
- assert stringify_annotation(Generator[None, None, None], 'fully-qualified-except-typing') == "Generator[None, None, None]"
- assert stringify_annotation(Generator[None, None, None], "fully-qualified") == "typing.Generator[None, None, None]"
- assert stringify_annotation(Generator[None, None, None], "smart") == "~typing.Generator[None, None, None]"
+ assert stringify_annotation(t.Generator[None, None, None], 'fully-qualified-except-typing') == "Generator[None, None, None]"
+ assert stringify_annotation(t.Generator[None, None, None], "fully-qualified") == "typing.Generator[None, None, None]"
+ assert stringify_annotation(t.Generator[None, None, None], "smart") == "~typing.Generator[None, None, None]"
+
+ assert stringify_annotation(abc.Generator[None, None, None], 'fully-qualified-except-typing') == "collections.abc.Generator[None, None, None]"
+ assert stringify_annotation(abc.Generator[None, None, None], "fully-qualified") == "collections.abc.Generator[None, None, None]"
+ assert stringify_annotation(abc.Generator[None, None, None], "smart") == "~collections.abc.Generator[None, None, None]"
- assert stringify_annotation(Iterator[None], 'fully-qualified-except-typing') == "Iterator[None]"
- assert stringify_annotation(Iterator[None], "fully-qualified") == "typing.Iterator[None]"
- assert stringify_annotation(Iterator[None], "smart") == "~typing.Iterator[None]"
+ assert stringify_annotation(t.Iterator[None], 'fully-qualified-except-typing') == "Iterator[None]"
+ assert stringify_annotation(t.Iterator[None], "fully-qualified") == "typing.Iterator[None]"
+ assert stringify_annotation(t.Iterator[None], "smart") == "~typing.Iterator[None]"
+
+ assert stringify_annotation(abc.Iterator[None], 'fully-qualified-except-typing') == "collections.abc.Iterator[None]"
+ assert stringify_annotation(abc.Iterator[None], "fully-qualified") == "collections.abc.Iterator[None]"
+ assert stringify_annotation(abc.Iterator[None], "smart") == "~collections.abc.Iterator[None]"
def test_stringify_type_hints_pep_585():
@@ -453,9 +522,36 @@ def test_stringify_type_hints_pep_585():
def test_stringify_Annotated():
- from typing import Annotated # type: ignore[attr-defined]
- assert stringify_annotation(Annotated[str, "foo", "bar"], 'fully-qualified-except-typing') == "str"
- assert stringify_annotation(Annotated[str, "foo", "bar"], "smart") == "str"
+ assert stringify_annotation(Annotated[str, "foo", "bar"], 'fully-qualified-except-typing') == "Annotated[str, 'foo', 'bar']"
+ assert stringify_annotation(Annotated[str, "foo", "bar"], 'smart') == "~typing.Annotated[str, 'foo', 'bar']"
+ assert stringify_annotation(Annotated[float, Gt(-10.0)], 'fully-qualified-except-typing') == "Annotated[float, tests.test_util.test_util_typing.Gt(gt=-10.0)]"
+ assert stringify_annotation(Annotated[float, Gt(-10.0)], 'smart') == "~typing.Annotated[float, ~tests.test_util.test_util_typing.Gt(gt=-10.0)]"
+
+
+def test_stringify_Unpack():
+ from typing_extensions import Unpack as UnpackCompat
+
+ class X(t.TypedDict):
+ x: int
+ y: int
+ label: str
+
+ if sys.version_info[:2] >= (3, 11):
+ # typing.Unpack was introduced in 3.11, but typing_extensions.Unpack only
+ # aliases typing.Unpack on 3.12+, so the two objects are not kept in sync;
+ # here we assume that users use typing.Unpack.
+ import typing
+
+ UnpackCompat = typing.Unpack # NoQA: F811
+ assert stringify_annotation(UnpackCompat['X']) == 'Unpack[X]'
+ assert stringify_annotation(UnpackCompat['X'], 'smart') == '~typing.Unpack[X]'
+ else:
+ assert stringify_annotation(UnpackCompat['X']) == 'typing_extensions.Unpack[X]'
+ assert stringify_annotation(UnpackCompat['X'], 'smart') == '~typing_extensions.Unpack[X]'
+
+ if sys.version_info[:2] >= (3, 11):
+ assert stringify_annotation(t.Unpack['X']) == 'Unpack[X]'
+ assert stringify_annotation(t.Unpack['X'], 'smart') == '~typing.Unpack[X]'
def test_stringify_type_hints_string():
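
The Annotated assertions above reflect the new behaviour of keeping Annotated metadata instead of discarding it. A minimal sketch (not part of the patch; assumes Sphinx 7.4 and Python 3.9+ for typing.Annotated):

    from typing import Annotated

    from sphinx.util.typing import stringify_annotation

    # Metadata is now preserved in the rendered annotation.
    print(stringify_annotation(Annotated[str, "foo", "bar"]))           # "Annotated[str, 'foo', 'bar']"
    print(stringify_annotation(Annotated[str, "foo", "bar"], "smart"))  # "~typing.Annotated[str, 'foo', 'bar']"
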
@@ -489,17 +585,29 @@ def test_stringify_type_hints_string():
def test_stringify_type_hints_Callable():
- assert stringify_annotation(Callable, 'fully-qualified-except-typing') == "Callable"
- assert stringify_annotation(Callable, "fully-qualified") == "typing.Callable"
- assert stringify_annotation(Callable, "smart") == "~typing.Callable"
+ assert stringify_annotation(t.Callable, 'fully-qualified-except-typing') == "Callable"
+ assert stringify_annotation(t.Callable, "fully-qualified") == "typing.Callable"
+ assert stringify_annotation(t.Callable, "smart") == "~typing.Callable"
+
+ assert stringify_annotation(t.Callable[[str], int], 'fully-qualified-except-typing') == "Callable[[str], int]"
+ assert stringify_annotation(t.Callable[[str], int], "fully-qualified") == "typing.Callable[[str], int]"
+ assert stringify_annotation(t.Callable[[str], int], "smart") == "~typing.Callable[[str], int]"
- assert stringify_annotation(Callable[[str], int], 'fully-qualified-except-typing') == "Callable[[str], int]"
- assert stringify_annotation(Callable[[str], int], "fully-qualified") == "typing.Callable[[str], int]"
- assert stringify_annotation(Callable[[str], int], "smart") == "~typing.Callable[[str], int]"
+ assert stringify_annotation(t.Callable[..., int], 'fully-qualified-except-typing') == "Callable[[...], int]"
+ assert stringify_annotation(t.Callable[..., int], "fully-qualified") == "typing.Callable[[...], int]"
+ assert stringify_annotation(t.Callable[..., int], "smart") == "~typing.Callable[[...], int]"
- assert stringify_annotation(Callable[..., int], 'fully-qualified-except-typing') == "Callable[[...], int]"
- assert stringify_annotation(Callable[..., int], "fully-qualified") == "typing.Callable[[...], int]"
- assert stringify_annotation(Callable[..., int], "smart") == "~typing.Callable[[...], int]"
+ assert stringify_annotation(abc.Callable, 'fully-qualified-except-typing') == "collections.abc.Callable"
+ assert stringify_annotation(abc.Callable, "fully-qualified") == "collections.abc.Callable"
+ assert stringify_annotation(abc.Callable, "smart") == "~collections.abc.Callable"
+
+ assert stringify_annotation(abc.Callable[[str], int], 'fully-qualified-except-typing') == "collections.abc.Callable[[str], int]"
+ assert stringify_annotation(abc.Callable[[str], int], "fully-qualified") == "collections.abc.Callable[[str], int]"
+ assert stringify_annotation(abc.Callable[[str], int], "smart") == "~collections.abc.Callable[[str], int]"
+
+ assert stringify_annotation(abc.Callable[..., int], 'fully-qualified-except-typing') == "collections.abc.Callable[[...], int]"
+ assert stringify_annotation(abc.Callable[..., int], "fully-qualified") == "collections.abc.Callable[[...], int]"
+ assert stringify_annotation(abc.Callable[..., int], "smart") == "~collections.abc.Callable[[...], int]"
def test_stringify_type_hints_Union():
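
Similarly for Callable, both typing.Callable and collections.abc.Callable are now exercised. A minimal sketch of the asserted rendering (not part of the patch; assumes Sphinx 7.4):

    import collections.abc as abc
    import typing as t

    from sphinx.util.typing import stringify_annotation

    print(stringify_annotation(t.Callable[[str], int], "smart"))    # "~typing.Callable[[str], int]"
    print(stringify_annotation(abc.Callable[[str], int], "smart"))  # "~collections.abc.Callable[[str], int]"
    # An Ellipsis parameter list is rendered wrapped in a list.
    print(stringify_annotation(t.Callable[..., int]))                # "Callable[[...], int]"
    print(stringify_annotation(abc.Callable[..., int]))              # "collections.abc.Callable[[...], int]"
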
@@ -578,7 +686,6 @@ def test_stringify_type_hints_alias():
def test_stringify_type_Literal():
- from typing import Literal # type: ignore[attr-defined]
assert stringify_annotation(Literal[1, "2", "\r"], 'fully-qualified-except-typing') == "Literal[1, '2', '\\r']"
assert stringify_annotation(Literal[1, "2", "\r"], "fully-qualified") == "typing.Literal[1, '2', '\\r']"
assert stringify_annotation(Literal[1, "2", "\r"], "smart") == "~typing.Literal[1, '2', '\\r']"
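
The Literal hunk only drops the now-redundant local import; the rendering itself is unchanged. For reference (a minimal sketch, not part of the patch):

    from typing import Literal

    from sphinx.util.typing import stringify_annotation

    print(stringify_annotation(Literal[1, "2"]))           # "Literal[1, '2']"
    print(stringify_annotation(Literal[1, "2"], "smart"))  # "~typing.Literal[1, '2']"
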
@@ -620,8 +727,6 @@ def test_stringify_mock():
def test_stringify_type_ForwardRef():
- from typing import ForwardRef # type: ignore[attr-defined]
-
assert stringify_annotation(ForwardRef("MyInt")) == "MyInt"
assert stringify_annotation(ForwardRef("MyInt"), 'smart') == "MyInt"
@@ -631,3 +736,21 @@ def test_stringify_type_ForwardRef():
assert stringify_annotation(Tuple[dict[ForwardRef("MyInt"), str], list[List[int]]]) == "Tuple[dict[MyInt, str], list[List[int]]]" # type: ignore[attr-defined]
assert stringify_annotation(Tuple[dict[ForwardRef("MyInt"), str], list[List[int]]], 'fully-qualified-except-typing') == "Tuple[dict[MyInt, str], list[List[int]]]" # type: ignore[attr-defined]
assert stringify_annotation(Tuple[dict[ForwardRef("MyInt"), str], list[List[int]]], 'smart') == "~typing.Tuple[dict[MyInt, str], list[~typing.List[int]]]" # type: ignore[attr-defined]
+
+
+@pytest.mark.xfail(sys.version_info[:2] <= (3, 9), reason='ParamSpec not supported in Python 3.9.')
+def test_stringify_type_hints_paramspec():
+ from typing import ParamSpec
+ P = ParamSpec('P')
+
+ assert stringify_annotation(P, 'fully-qualified') == "~P"
+ assert stringify_annotation(P, 'fully-qualified-except-typing') == "~P"
+ assert stringify_annotation(P, "smart") == "~P"
+
+ assert stringify_annotation(P.args, 'fully-qualified') == "typing.~P"
+ assert stringify_annotation(P.args, 'fully-qualified-except-typing') == "~P"
+ assert stringify_annotation(P.args, "smart") == "~typing.~P"
+
+ assert stringify_annotation(P.kwargs, 'fully-qualified') == "typing.~P"
+ assert stringify_annotation(P.kwargs, 'fully-qualified-except-typing') == "~P"
+ assert stringify_annotation(P.kwargs, "smart") == "~typing.~P"
diff --git a/tests/test_util/typing_test_data.py b/tests/test_util/typing_test_data.py
index e29b600..0588836 100644
--- a/tests/test_util/typing_test_data.py
+++ b/tests/test_util/typing_test_data.py
@@ -1,6 +1,6 @@
from inspect import Signature
from numbers import Integral
-from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, Union
+from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, TypeVar, Union
def f0(x: int, y: Integral) -> None:
@@ -121,6 +121,10 @@ def f25(a, b, /):
pass
+def f26(x: Literal[1, 2, 3] = 1, y: Union[Literal["a"], Literal["b"]] = "a") -> None:
+ pass
+
+
class Node:
def __init__(self, parent: Optional['Node']) -> None:
pass
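
The new f26 fixture gives the signature tests a function whose parameters are Literal-typed with defaults. A minimal sketch of how such a function can be introspected (not part of the patch; the exact rendering of the annotations depends on the Python version):

    from inspect import signature
    from typing import Literal, Union

    def f26(x: Literal[1, 2, 3] = 1, y: Union[Literal["a"], Literal["b"]] = "a") -> None:
        pass

    # Prints something like:
    # (x: typing.Literal[1, 2, 3] = 1, y: typing.Union[typing.Literal['a'], typing.Literal['b']] = 'a') -> None
    print(signature(f26))
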