author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-15 17:25:40 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-15 17:25:40 +0000
commit     cf7da1843c45a4c2df7a749f7886a2d2ba0ee92a
tree       18dcde1a8d1f5570a77cd0c361de3b490d02c789 /tests
parent     Initial commit.
Adding upstream version 7.2.6.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests')
943 files changed, 50007 insertions, 0 deletions
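Most of what this commit imports are the `tests/roots/test-*` fixture projects, which the test suite drives through `sphinx.testing.fixtures` (registered via `pytest_plugins` in the `tests/conftest.py` added below, whose `rootdir` fixture points at `tests/roots`). A minimal sketch of how such a root is typically exercised — the builder name, testroot id, and the asserted string are illustrative examples, not part of this commit:

```python
import pytest


@pytest.mark.sphinx('html', testroot='basic')  # builds tests/roots/test-basic
def test_basic_root_builds(app):
    # 'app' is provided by sphinx.testing.fixtures (see pytest_plugins in conftest.py)
    app.build()
    html = (app.outdir / 'index.html').read_text(encoding='utf-8')
    assert 'The basic Sphinx documentation for testing' in html
```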
diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/__init__.py diff --git a/tests/certs/cert.pem b/tests/certs/cert.pem new file mode 100644 index 0000000..6f8c35c --- /dev/null +++ b/tests/certs/cert.pem @@ -0,0 +1,50 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC9fzHGBPNaZNcN +nL/1nvO2xJR/E64vFua3QfPQQ5HpigjrK/HRUlRGztRKJ+CEjCXNYNfQ4dUcV45o +k5uPH3U1CkAw2d/We+kZnAHkNuw4mRC0ohdzpUByyDOA5WtUWPn9SwhXCVz6fM7e +I52auvzpUE6soVDM3nucnqZDJ3Ua9KgB02FrqX13S76Uq+uf8Q2hpTruO/nBzB4p +6xFwJJ1taXEEWi8swg6HO8/+0x0AeripV6JieNUptEFuV9kLvRz9qGg0CO2f7AdI +jNeFDGrgO7qJ+VxXV9Gnbi6ph4vsUwtJZB3phRGGomdgiRd6PSma81nvTe1z69x/ +g+8P091pAgMBAAECggEAIrTABfd0JpMffAPAeJjjJA8+70NIfKFiIiA3Kmalu7Mn +TQMgZ+j/PHS3FtnU2hHc/o+FF2G1KVqz311heUYWrl8xQIE26M6K88DJ6+VPQFJw +Z9TkHK8gbaVTIYFjNfCR4J00atRxLgNb0/2L6QHkPksSDbYB2XPKCfZYlyYL4aKq +dePghFu9ePXhUXooPCqke+kP0b8OmHzPlmJpxbeb8ujiox2+4wYjN8lWPz8xHv8i +IM7V5hAbPIaQfu/joKrRKk+Kk8UqGurkKQ75KLLL+1oaJO/GLTQ4bk5tpRgfWPda +aEBzSPrnqame2CKUWtBughuRWSxdTIMvdXIC/ym1gQKBgQDx6Nyio/L6I5CdlXwC +HAzBCy1mnj70Kj97tQc+A/0z8dD7fCSE/oo8IiEKixcjnaSxHk8VjINF/w17n63W +8neE7pVsuDwxfhiQ9ZRI1WpV0LsFEoTrEWG7Ax8UzbHXCQbNJ9SI0HJRo9UN0f/Z +t+ZT+HNUzdcpCwTvdRVDisbXcQKBgQDIiMz58GFEwdGPXJKEhSyQ3kSQBjeqo0Vl +wMDuDvFEckHl/p1RnDo0lzaq6FivOX84ymvGNdQW14TnQp3A/mkQ5o6k/e1pfAA6 +X0Y6tBH/QppVo5sFvOufyn02k48k5pFAjLHH9L9i0dyWqq4V6PgA2uk4qilFxEg/ +CJEVfq4ZeQKBgQCZPHKWq9f8T48J42kcRPxnRFdMC63BKQnxqOifhhNcVi+VPjw7 +6qlSEiRv80+DBhcPAy4BbnKxYjD+QFX0NL80+5S3u7SVfVS+bnGx+U5UcdYmDmcY +KHiJ6B5GJU4j8tnWFwbwa2ofAPKywHWbSnyicF1OON20aACGVtpTYJM4YQKBgBW4 +09NDGZY0FHoeAfT+4/vxR6X+NmtyciL6hSuETNgoNEEwmmPrs1ZdBtvufSTF6qUB +MDlxPT8YK1pNmf78z+63ur3ej6f8eZ3ZEidruANZeJRMO4+cjj1p1rRhuYC6xQMj ++mH5ff27U9SyOlc/PBYDoH212PCouVaym9yjM0KpAoGBALr583slY55ESOthLrfX +1ecoET5xxRm431XbZMnxu0uUvHWNfqoojtmD7laclb9HwkpShPB6PT1egBIvDWWM +bVUuXzJ8gP0tIG3dHgiiUlld3ahOiaMYSU77uLFBRWv5sQqfewLuFvlzHn/2ZSt7 +TcipT4f67b18W8iuLJELEs57 +-----END PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIIDuTCCAqGgAwIBAgIUUNvkPwe0W8C2I0+KnLpMaQ+S+vowDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCRlIxETAPBgNVBAgMCEJyZXRhZ25lMQ8wDQYDVQQHDAZS +ZW5uZXMxGjAYBgNVBAoMEVNwaGlueCB0ZXN0IHN1aXRlMRIwEAYDVQQDDAlsb2Nh +bGhvc3QwHhcNMjAxMTE1MTcyNDExWhcNMzAxMTEzMTcyNDExWjBhMQswCQYDVQQG +EwJGUjERMA8GA1UECAwIQnJldGFnbmUxDzANBgNVBAcMBlJlbm5lczEaMBgGA1UE +CgwRU3BoaW54IHRlc3Qgc3VpdGUxEjAQBgNVBAMMCWxvY2FsaG9zdDCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBAL1/McYE81pk1w2cv/We87bElH8Tri8W +5rdB89BDkemKCOsr8dFSVEbO1Eon4ISMJc1g19Dh1RxXjmiTm48fdTUKQDDZ39Z7 +6RmcAeQ27DiZELSiF3OlQHLIM4Dla1RY+f1LCFcJXPp8zt4jnZq6/OlQTqyhUMze +e5yepkMndRr0qAHTYWupfXdLvpSr65/xDaGlOu47+cHMHinrEXAknW1pcQRaLyzC +Doc7z/7THQB6uKlXomJ41Sm0QW5X2Qu9HP2oaDQI7Z/sB0iM14UMauA7uon5XFdX +0aduLqmHi+xTC0lkHemFEYaiZ2CJF3o9KZrzWe9N7XPr3H+D7w/T3WkCAwEAAaNp +MGcwHQYDVR0OBBYEFN1iHZj88N6eI2FlRzza52xzOU5EMB8GA1UdIwQYMBaAFN1i +HZj88N6eI2FlRzza52xzOU5EMA8GA1UdEwEB/wQFMAMBAf8wFAYDVR0RBA0wC4IJ +bG9jYWxob3N0MA0GCSqGSIb3DQEBCwUAA4IBAQBVUZm1iw7N7uZu/SF3hailxS+1 +3KChItWu3ZOIjlmDIkaJ9kWqP2ficUg3tBUx6/UOjHQAwRC4rj87BoSV2mEy+0OX +fyy+ER/BeHYly5v+hpjVojVKeqysk5CKttZM+cOibT2SzLLYf0InNqZRQRJco+nL +QNR0hVo/Lz6Mf1gF2ywf9bXSF3+XECU4K6sVm4QpFbJNm+fHqJBuh1LXHRrcTAsP +LM6PBnd3P5QTcr/G0s/tYMPmero9YHZUO8FMvMVoI2K8k6/duG/EbBaNzriRI1OM +PpZGCWxbJfyApnzc5lGAG4zJnV/wpOyNhKJuW9N1fr2oEwPpJlS3VzrgeKcY +-----END CERTIFICATE----- diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..1b909bd --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,40 @@ +import 
os +from pathlib import Path + +import docutils +import pytest + +import sphinx +import sphinx.locale + + +def _init_console(locale_dir=sphinx.locale._LOCALE_DIR, catalog='sphinx'): + """Monkeypatch ``init_console`` to skip its action. + + Some tests rely on warning messages in English. We don't want + CLI tests to bleed over those tests and make their warnings + translated. + """ + return sphinx.locale.NullTranslations(), False + + +sphinx.locale.init_console = _init_console + +pytest_plugins = 'sphinx.testing.fixtures' + +# Exclude 'roots' dirs for pytest test collector +collect_ignore = ['roots'] + +os.environ['SPHINX_AUTODOC_RELOAD_MODULES'] = '1' + + +@pytest.fixture(scope='session') +def rootdir(): + return Path(__file__).parent.absolute() / 'roots' + + +def pytest_report_header(config): + header = f"libraries: Sphinx-{sphinx.__display_version__}, docutils-{docutils.__version__}" + if hasattr(config, '_tmp_path_factory'): + header += f"\nbase tmp_path: {config._tmp_path_factory.getbasetemp()}" + return header diff --git a/tests/ext_napoleon_pep526_data_google.py b/tests/ext_napoleon_pep526_data_google.py new file mode 100644 index 0000000..bb55b0f --- /dev/null +++ b/tests/ext_napoleon_pep526_data_google.py @@ -0,0 +1,16 @@ +"""Test module for napoleon PEP 526 compatibility with google style""" + +module_level_var: int = 99 +"""This is an example module level variable""" + + +class PEP526GoogleClass: + """Sample class with PEP 526 annotations and google docstring + + Attributes: + attr1: Attr1 description. + attr2: Attr2 description. + """ + + attr1: int + attr2: str diff --git a/tests/ext_napoleon_pep526_data_numpy.py b/tests/ext_napoleon_pep526_data_numpy.py new file mode 100644 index 0000000..b3093a7 --- /dev/null +++ b/tests/ext_napoleon_pep526_data_numpy.py @@ -0,0 +1,20 @@ +"""Test module for napoleon PEP 526 compatibility with numpy style""" + +module_level_var: int = 99 +"""This is an example module level variable""" + + +class PEP526NumpyClass: + """ + Sample class with PEP 526 annotations and numpy docstring + + Attributes + ---------- + attr1: + Attr1 description + + attr2: + Attr2 description + """ + attr1: int + attr2: str diff --git a/tests/js/documentation_options.js b/tests/js/documentation_options.js new file mode 100644 index 0000000..e736460 --- /dev/null +++ b/tests/js/documentation_options.js @@ -0,0 +1 @@ +const DOCUMENTATION_OPTIONS = {}; diff --git a/tests/js/searchtools.js b/tests/js/searchtools.js new file mode 100644 index 0000000..c9e0c43 --- /dev/null +++ b/tests/js/searchtools.js @@ -0,0 +1,62 @@ +describe('Basic html theme search', function() { + + describe('terms search', function() { + + it('should find "C++" when in index', function() { + index = { + docnames:["index"], + filenames:["index.rst"], + terms:{'c++':0}, + titles:["<no title>"], + titleterms:{} + } + Search.setIndex(index); + searchterms = ['c++']; + excluded = []; + terms = index.terms; + titleterms = index.titleterms; + + hits = [[ + "index", + "<no title>", + "", + null, + 2, + "index.rst" + ]]; + expect(Search.performTermsSearch(searchterms, excluded, terms, titleterms)).toEqual(hits); + }); + + }); + +}); + +// This is regression test for https://github.com/sphinx-doc/sphinx/issues/3150 +describe('splitQuery regression tests', () => { + + it('can split English words', () => { + const parts = splitQuery(' Hello World ') + expect(parts).toEqual(['Hello', 'World']) + }) + + it('can split special characters', () => { + const parts = splitQuery('Pin-Code') + expect(parts).toEqual(['Pin', 
'Code']) + }) + + it('can split Chinese characters', () => { + const parts = splitQuery('Hello from 中国 上海') + expect(parts).toEqual(['Hello', 'from', '中国', '上海']) + }) + + it('can split Emoji (surrogate pair) characters. It should keep emojis.', () => { + const parts = splitQuery('😁😁') + expect(parts).toEqual(['😁😁']) + }) + + it('can split umlauts. It should keep umlauts.', () => { + const parts = splitQuery('Löschen Prüfung Abändern ærlig spørsmål') + expect(parts).toEqual(['Löschen', 'Prüfung', 'Abändern', 'ærlig', 'spørsmål']) + }) + +}) diff --git a/tests/js/sphinx_highlight.js b/tests/js/sphinx_highlight.js new file mode 100644 index 0000000..1f52eab --- /dev/null +++ b/tests/js/sphinx_highlight.js @@ -0,0 +1,39 @@ +describe('highlightText', function() { + + const cyrillicTerm = 'шеллы'; + const umlautTerm = 'gänsefüßchen'; + + it('should highlight text incl. special characters correctly in HTML', function() { + const highlightTestSpan = new DOMParser().parseFromString( + '<span>This is the шеллы and Gänsefüßchen test!</span>', 'text/html').body.firstChild + _highlightText(highlightTestSpan, cyrillicTerm, 'highlighted'); + _highlightText(highlightTestSpan, umlautTerm, 'highlighted'); + const expectedHtmlString = + 'This is the <span class=\"highlighted\">шеллы</span> and ' + + '<span class=\"highlighted\">Gänsefüßchen</span> test!'; + expect(highlightTestSpan.innerHTML).toEqual(expectedHtmlString); + }); + + it('should highlight text incl. special characters correctly in SVG', function() { + const highlightTestSvg = new DOMParser().parseFromString( + '<span id="svg-highlight-test">' + + '<svg xmlns="http://www.w3.org/2000/svg" height="50" width="500">' + + '<text x="0" y="15">' + + 'This is the шеллы and Gänsefüßchen test!' + + '</text>' + + '</svg>' + + '</span>', 'text/html').body.firstChild + _highlightText(highlightTestSvg, cyrillicTerm, 'highlighted'); + _highlightText(highlightTestSvg, umlautTerm, 'highlighted'); + /* Note wild cards and ``toMatch``; allowing for some variability + seems to be necessary, even between different FF versions */ + const expectedSvgString = + '<svg xmlns="http://www.w3.org/2000/svg" height="50" width="500">' + + '<rect x=".*" y=".*" width=".*" height=".*" class="highlighted"/>' + + '<rect x=".*" y=".*" width=".*" height=".*" class="highlighted"/>' + + '<text x=".*" y=".*">This is the <tspan>шеллы</tspan> and ' + + '<tspan>Gänsefüßchen</tspan> test!</text></svg>'; + expect(new XMLSerializer().serializeToString(highlightTestSvg.firstChild)).toMatch(new RegExp(expectedSvgString)); + }); + +}); diff --git a/tests/roots/test-add_enumerable_node/conf.py b/tests/roots/test-add_enumerable_node/conf.py new file mode 100644 index 0000000..8c3a568 --- /dev/null +++ b/tests/roots/test-add_enumerable_node/conf.py @@ -0,0 +1,7 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) +extensions = ['enumerable_node'] + +numfig = True diff --git a/tests/roots/test-add_enumerable_node/enumerable_node.py b/tests/roots/test-add_enumerable_node/enumerable_node.py new file mode 100644 index 0000000..782365e --- /dev/null +++ b/tests/roots/test-add_enumerable_node/enumerable_node.py @@ -0,0 +1,62 @@ +from docutils import nodes +from docutils.parsers.rst import Directive + + +class my_figure(nodes.figure): + pass + + +def visit_my_figure(self, node): + self.visit_figure(node) + + +def depart_my_figure(self, node): + self.depart_figure(node) + + +class MyFigure(Directive): + required_arguments = 1 + has_content = True + + def run(self): + figure_node = 
my_figure() + figure_node += nodes.image(uri=self.arguments[0]) + figure_node += nodes.caption(text=''.join(self.content)) + return [figure_node] + + +class numbered_text(nodes.Element): + pass + + +def visit_numbered_text(self, node): + self.body.append(self.starttag(node, 'div')) + self.add_fignumber(node) + self.body.append(node['title']) + self.body.append('</div>') + raise nodes.SkipNode + + +def get_title(node): + return node['title'] + + +class NumberedText(Directive): + required_arguments = 1 + final_argument_whitespace = True + + def run(self): + return [numbered_text(title=self.arguments[0])] + + +def setup(app): + # my-figure + app.add_enumerable_node(my_figure, 'figure', + html=(visit_my_figure, depart_my_figure)) + app.add_directive('my-figure', MyFigure) + + # numbered_label + app.add_enumerable_node(numbered_text, 'original', get_title, + html=(visit_numbered_text, None)) + app.add_directive('numbered-text', NumberedText) + app.config.numfig_format.setdefault('original', 'No.%s') diff --git a/tests/roots/test-add_enumerable_node/index.rst b/tests/roots/test-add_enumerable_node/index.rst new file mode 100644 index 0000000..98b858e --- /dev/null +++ b/tests/roots/test-add_enumerable_node/index.rst @@ -0,0 +1,48 @@ +======================== +test-add_enumerable_node +======================== + +.. toctree:: + :numbered: + + +First section +============= + +.. _first_figure: + +.. figure:: rimg.png + + First figure + +.. _first_my_figure: + +.. my-figure:: rimg.png + + First my figure + +.. _first_numbered_text: + +.. numbered-text:: Hello world + +.. _second_numbered_text: + +.. numbered-text:: Hello Sphinx + +Second section +============== + +.. _second_my_figure: + +.. my-figure:: rimg.png + + Second my figure + +Reference section +================= + +* first_figure is :numref:`first_figure` +* first_my_figure is :numref:`first_my_figure` +* second_my_figure is :numref:`second_my_figure` +* first numbered_text is :numref:`first_numbered_text` +* second numbered_text is :numref:`second_numbered_text` diff --git a/tests/roots/test-add_enumerable_node/rimg.png b/tests/roots/test-add_enumerable_node/rimg.png Binary files differnew file mode 100644 index 0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-add_enumerable_node/rimg.png diff --git a/tests/roots/test-add_source_parser-conflicts-with-users-setting/conf.py b/tests/roots/test-add_source_parser-conflicts-with-users-setting/conf.py new file mode 100644 index 0000000..3ad5491 --- /dev/null +++ b/tests/roots/test-add_source_parser-conflicts-with-users-setting/conf.py @@ -0,0 +1,17 @@ +import os +import sys + +from docutils.parsers import Parser + +sys.path.insert(0, os.path.abspath('.')) + + +class DummyTestParser(Parser): + supported = ('dummy',) + + +extensions = ['source_parser'] +source_suffix = ['.rst', '.test'] +source_parsers = { + '.test': DummyTestParser +} diff --git a/tests/roots/test-add_source_parser-conflicts-with-users-setting/source_parser.py b/tests/roots/test-add_source_parser-conflicts-with-users-setting/source_parser.py new file mode 100644 index 0000000..69ad02d --- /dev/null +++ b/tests/roots/test-add_source_parser-conflicts-with-users-setting/source_parser.py @@ -0,0 +1,10 @@ +from docutils.parsers import Parser + + +class TestSourceParser(Parser): + supported = ('test',) + + +def setup(app): + app.add_source_suffix('.test', 'test') + app.add_source_parser(TestSourceParser) diff --git a/tests/roots/test-add_source_parser/conf.py b/tests/roots/test-add_source_parser/conf.py new file mode 100644 index 
0000000..2acd4d2 --- /dev/null +++ b/tests/roots/test-add_source_parser/conf.py @@ -0,0 +1,8 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + + +extensions = ['source_parser'] +source_suffix = ['.rst'] diff --git a/tests/roots/test-add_source_parser/source_parser.py b/tests/roots/test-add_source_parser/source_parser.py new file mode 100644 index 0000000..69ad02d --- /dev/null +++ b/tests/roots/test-add_source_parser/source_parser.py @@ -0,0 +1,10 @@ +from docutils.parsers import Parser + + +class TestSourceParser(Parser): + supported = ('test',) + + +def setup(app): + app.add_source_suffix('.test', 'test') + app.add_source_parser(TestSourceParser) diff --git a/tests/roots/test-api-set-translator/conf.py b/tests/roots/test-api-set-translator/conf.py new file mode 100644 index 0000000..3b56c39 --- /dev/null +++ b/tests/roots/test-api-set-translator/conf.py @@ -0,0 +1,72 @@ +# set this by test +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + +from docutils.writers.docutils_xml import XMLTranslator + +from sphinx.writers.html import HTML5Translator +from sphinx.writers.latex import LaTeXTranslator +from sphinx.writers.manpage import ManualPageTranslator +from sphinx.writers.texinfo import TexinfoTranslator +from sphinx.writers.text import TextTranslator + +project = 'test' + + +class ConfHTMLTranslator(HTML5Translator): + pass + + +class ConfDirHTMLTranslator(HTML5Translator): + pass + + +class ConfSingleHTMLTranslator(HTML5Translator): + pass + + +class ConfPickleTranslator(HTML5Translator): + pass + + +class ConfJsonTranslator(HTML5Translator): + pass + + +class ConfLaTeXTranslator(LaTeXTranslator): + pass + + +class ConfManualPageTranslator(ManualPageTranslator): + pass + + +class ConfTexinfoTranslator(TexinfoTranslator): + pass + + +class ConfTextTranslator(TextTranslator): + pass + + +class ConfXMLTranslator(XMLTranslator): + pass + + +class ConfPseudoXMLTranslator(XMLTranslator): + pass + + +def setup(app): + app.set_translator('html', ConfHTMLTranslator) + app.set_translator('dirhtml', ConfDirHTMLTranslator) + app.set_translator('singlehtml', ConfSingleHTMLTranslator) + app.set_translator('pickle', ConfPickleTranslator) + app.set_translator('json', ConfJsonTranslator) + app.set_translator('latex', ConfLaTeXTranslator) + app.set_translator('man', ConfManualPageTranslator) + app.set_translator('texinfo', ConfTexinfoTranslator) + app.set_translator('text', ConfTextTranslator) + app.set_translator('xml', ConfXMLTranslator) + app.set_translator('pseudoxml', ConfPseudoXMLTranslator) diff --git a/tests/roots/test-api-set-translator/index.rst b/tests/roots/test-api-set-translator/index.rst new file mode 100644 index 0000000..4a7d692 --- /dev/null +++ b/tests/roots/test-api-set-translator/index.rst @@ -0,0 +1,3 @@ +======================= +Test API set_translator +======================= diff --git a/tests/roots/test-api-set-translator/nonext/conf.py b/tests/roots/test-api-set-translator/nonext/conf.py new file mode 100644 index 0000000..f93e4da --- /dev/null +++ b/tests/roots/test-api-set-translator/nonext/conf.py @@ -0,0 +1,6 @@ +import os +import sys + +sys.path.insert(0, os.path.dirname(os.path.abspath('.'))) + +project = 'test' diff --git a/tests/roots/test-api-set-translator/translator.py b/tests/roots/test-api-set-translator/translator.py new file mode 100644 index 0000000..3adbf76 --- /dev/null +++ b/tests/roots/test-api-set-translator/translator.py @@ -0,0 +1,5 @@ +from sphinx.writers.html import HTML5Translator + + +class 
ExtHTMLTranslator(HTML5Translator): + pass diff --git a/tests/roots/test-apidoc-duplicates/fish_licence/halibut.cpython-38-x86_64-linux-gnu.so b/tests/roots/test-apidoc-duplicates/fish_licence/halibut.cpython-38-x86_64-linux-gnu.so new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-apidoc-duplicates/fish_licence/halibut.cpython-38-x86_64-linux-gnu.so diff --git a/tests/roots/test-apidoc-duplicates/fish_licence/halibut.pyx b/tests/roots/test-apidoc-duplicates/fish_licence/halibut.pyx new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-apidoc-duplicates/fish_licence/halibut.pyx diff --git a/tests/roots/test-apidoc-pep420/a/b/c/__init__.py b/tests/roots/test-apidoc-pep420/a/b/c/__init__.py new file mode 100644 index 0000000..5b727c1 --- /dev/null +++ b/tests/roots/test-apidoc-pep420/a/b/c/__init__.py @@ -0,0 +1 @@ +"Package C" diff --git a/tests/roots/test-apidoc-pep420/a/b/c/d.py b/tests/roots/test-apidoc-pep420/a/b/c/d.py new file mode 100644 index 0000000..63b0e34 --- /dev/null +++ b/tests/roots/test-apidoc-pep420/a/b/c/d.py @@ -0,0 +1 @@ +"Module d" diff --git a/tests/roots/test-apidoc-pep420/a/b/e/__init__.py b/tests/roots/test-apidoc-pep420/a/b/e/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-apidoc-pep420/a/b/e/__init__.py diff --git a/tests/roots/test-apidoc-pep420/a/b/e/f.py b/tests/roots/test-apidoc-pep420/a/b/e/f.py new file mode 100644 index 0000000..a09affe --- /dev/null +++ b/tests/roots/test-apidoc-pep420/a/b/e/f.py @@ -0,0 +1 @@ +"Module f" diff --git a/tests/roots/test-apidoc-pep420/a/b/x/y.py b/tests/roots/test-apidoc-pep420/a/b/x/y.py new file mode 100644 index 0000000..46bc245 --- /dev/null +++ b/tests/roots/test-apidoc-pep420/a/b/x/y.py @@ -0,0 +1 @@ +"Module y" diff --git a/tests/roots/test-apidoc-subpackage-in-toc/parent/__init__.py b/tests/roots/test-apidoc-subpackage-in-toc/parent/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-apidoc-subpackage-in-toc/parent/__init__.py diff --git a/tests/roots/test-apidoc-subpackage-in-toc/parent/child/__init__.py b/tests/roots/test-apidoc-subpackage-in-toc/parent/child/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-apidoc-subpackage-in-toc/parent/child/__init__.py diff --git a/tests/roots/test-apidoc-subpackage-in-toc/parent/child/foo.py b/tests/roots/test-apidoc-subpackage-in-toc/parent/child/foo.py new file mode 100644 index 0000000..810c96e --- /dev/null +++ b/tests/roots/test-apidoc-subpackage-in-toc/parent/child/foo.py @@ -0,0 +1 @@ +"foo" diff --git a/tests/roots/test-apidoc-toc/mypackage/__init__.py b/tests/roots/test-apidoc-toc/mypackage/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-apidoc-toc/mypackage/__init__.py diff --git a/tests/roots/test-apidoc-toc/mypackage/main.py b/tests/roots/test-apidoc-toc/mypackage/main.py new file mode 100755 index 0000000..f532813 --- /dev/null +++ b/tests/roots/test-apidoc-toc/mypackage/main.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +import os + +import mod_resource +import mod_something + +if __name__ == "__main__": + print(f"Hello, world! 
-> something returns: {mod_something.something()}") + + res_path = \ + os.path.join(os.path.dirname(mod_resource.__file__), 'resource.txt') + with open(res_path, encoding='utf-8') as f: + text = f.read() + print(f"From mod_resource:resource.txt -> {text}") diff --git a/tests/roots/test-apidoc-toc/mypackage/no_init/foo.py b/tests/roots/test-apidoc-toc/mypackage/no_init/foo.py new file mode 100644 index 0000000..ece50cb --- /dev/null +++ b/tests/roots/test-apidoc-toc/mypackage/no_init/foo.py @@ -0,0 +1 @@ +MESSAGE = "There's no __init__.py in this folder, hence we should be left out" diff --git a/tests/roots/test-apidoc-toc/mypackage/resource/__init__.py b/tests/roots/test-apidoc-toc/mypackage/resource/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-apidoc-toc/mypackage/resource/__init__.py diff --git a/tests/roots/test-apidoc-toc/mypackage/resource/resource.txt b/tests/roots/test-apidoc-toc/mypackage/resource/resource.txt new file mode 100644 index 0000000..c04433e --- /dev/null +++ b/tests/roots/test-apidoc-toc/mypackage/resource/resource.txt @@ -0,0 +1 @@ +This is a text resource to be included in this otherwise empty module. No python contents here. diff --git a/tests/roots/test-apidoc-toc/mypackage/something/__init__.py b/tests/roots/test-apidoc-toc/mypackage/something/__init__.py new file mode 100644 index 0000000..6401e43 --- /dev/null +++ b/tests/roots/test-apidoc-toc/mypackage/something/__init__.py @@ -0,0 +1 @@ +"Subpackage Something" diff --git a/tests/roots/test-apidoc-trailing-underscore/package_/__init__.py b/tests/roots/test-apidoc-trailing-underscore/package_/__init__.py new file mode 100644 index 0000000..b09612b --- /dev/null +++ b/tests/roots/test-apidoc-trailing-underscore/package_/__init__.py @@ -0,0 +1 @@ +""" A package with trailing underscores """ diff --git a/tests/roots/test-apidoc-trailing-underscore/package_/module_.py b/tests/roots/test-apidoc-trailing-underscore/package_/module_.py new file mode 100644 index 0000000..e16461c --- /dev/null +++ b/tests/roots/test-apidoc-trailing-underscore/package_/module_.py @@ -0,0 +1,9 @@ +""" A module with a trailing underscore """ + + +class SomeClass_: + """ A class with a trailing underscore """ + + +def some_function_(some_arg_): + """ A function with a trailing underscore in name and argument """ diff --git a/tests/roots/test-autosummary/conf.py b/tests/roots/test-autosummary/conf.py new file mode 100644 index 0000000..46cf4fa --- /dev/null +++ b/tests/roots/test-autosummary/conf.py @@ -0,0 +1,12 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + +extensions = ['sphinx.ext.autosummary'] + +# The suffix of source filenames. +source_suffix = '.rst' +autosummary_generate = True + +exclude_patterns = ['_build'] diff --git a/tests/roots/test-autosummary/dummy_module.py b/tests/roots/test-autosummary/dummy_module.py new file mode 100644 index 0000000..4adc031 --- /dev/null +++ b/tests/roots/test-autosummary/dummy_module.py @@ -0,0 +1,85 @@ +""" +.. autosummary:: + + module_attr + C.class_attr + C.instance_attr + C.prop_attr1 + C.prop_attr2 + C.C2 +""" + + +def withSentence(): + '''I have a sentence which + spans multiple lines. Then I have + more stuff + ''' + pass + + +def noSentence(): + '''this doesn't start with a + capital. so it's not considered + a sentence + ''' + pass + + +def emptyLine(): + '''This is the real summary + + However, it did't end with a period. + ''' + pass + + +#: This is a module attribute +#: +#: value is integer. 
+module_attr = 1 + + +class C: + ''' + My C class + + with class_attr attribute + ''' + + #: This is a class attribute + #: + #: value is integer. + class_attr = 42 + + def __init__(self): + #: This is an instance attribute + #: + #: value is a string + self.instance_attr = "42" + + def _prop_attr_get(self): + """ + This is a function docstring + + return value is string. + """ + return 'spam egg' + + prop_attr1 = property(_prop_attr_get) + + prop_attr2 = property(_prop_attr_get) + """ + This is a attribute docstring + + value is string. + """ + + class C2: + ''' + This is a nested inner class docstring + ''' + + +def func(arg_, *args, **kwargs): + """Test function take an argument ended with underscore.""" diff --git a/tests/roots/test-autosummary/index.rst b/tests/roots/test-autosummary/index.rst new file mode 100644 index 0000000..5ddc4bd --- /dev/null +++ b/tests/roots/test-autosummary/index.rst @@ -0,0 +1,8 @@ + +.. autosummary:: + :nosignatures: + :toctree: + + dummy_module + underscore_module_ + sphinx diff --git a/tests/roots/test-autosummary/sphinx.rst b/tests/roots/test-autosummary/sphinx.rst new file mode 100644 index 0000000..fc1a35a --- /dev/null +++ b/tests/roots/test-autosummary/sphinx.rst @@ -0,0 +1,31 @@ +Autosummary test +================ + +.. autosummary:: + :toctree: generated + + sphinx.application.Sphinx + +.. currentmodule:: sphinx.application + +.. autoclass:: TemplateBridge + + Basic test + + .. autosummary:: + + render -- some ignored stuff goes here + render_string More ignored stuff + + Test with tildes + + .. autosummary:: + + ~TemplateBridge.render + ~TemplateBridge.render_string + + Methods: + + .. automethod:: render + + .. automethod:: render_string diff --git a/tests/roots/test-autosummary/underscore_module_.py b/tests/roots/test-autosummary/underscore_module_.py new file mode 100644 index 0000000..8584e60 --- /dev/null +++ b/tests/roots/test-autosummary/underscore_module_.py @@ -0,0 +1,15 @@ +""" +module with trailing underscores everywhere +""" + + +class class_: + """ Class """ + def method_(_arg): + """ Method """ + pass + + +def function_(_arg): + """ Function """ + pass diff --git a/tests/roots/test-basic/conf.py b/tests/roots/test-basic/conf.py new file mode 100644 index 0000000..e274bde --- /dev/null +++ b/tests/roots/test-basic/conf.py @@ -0,0 +1,3 @@ +latex_documents = [ + ('index', 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report') +] diff --git a/tests/roots/test-basic/index.rst b/tests/roots/test-basic/index.rst new file mode 100644 index 0000000..af12ed6 --- /dev/null +++ b/tests/roots/test-basic/index.rst @@ -0,0 +1,31 @@ +The basic Sphinx documentation for testing +========================================== + +Sphinx is a tool that makes it easy to create intelligent and beautiful +documentation for Python projects (or other documents consisting of multiple +reStructuredText sources), written by Georg Brandl. It was originally created +for the new Python documentation, and has excellent facilities for Python +project documentation, but C/C++ is supported as well, and more languages are +planned. + +Sphinx uses reStructuredText as its markup language, and many of its strengths +come from the power and straightforwardness of reStructuredText and its parsing +and translating suite, the Docutils. 
+ +features +-------- + +Among its features are the following: + +* Output formats: HTML (including derivative formats such as HTML Help, Epub + and Qt Help), plain text, manual pages and LaTeX or direct PDF output + using rst2pdf +* Extensive cross-references: semantic markup and automatic links + for functions, classes, glossary terms and similar pieces of information +* Hierarchical structure: easy definition of a document tree, with automatic + links to siblings, parents and children +* Automatic indices: general index as well as a module index +* Code handling: automatic highlighting using the Pygments highlighter +* Flexible HTML output using the Jinja 2 templating engine +* Various extensions are available, e.g. for automatic testing of snippets + and inclusion of appropriately formatted docstrings diff --git a/tests/roots/test-build-html-theme-having-multiple-stylesheets/_themes/mytheme/_static/extra.css b/tests/roots/test-build-html-theme-having-multiple-stylesheets/_themes/mytheme/_static/extra.css new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-build-html-theme-having-multiple-stylesheets/_themes/mytheme/_static/extra.css diff --git a/tests/roots/test-build-html-theme-having-multiple-stylesheets/_themes/mytheme/_static/mytheme.css b/tests/roots/test-build-html-theme-having-multiple-stylesheets/_themes/mytheme/_static/mytheme.css new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-build-html-theme-having-multiple-stylesheets/_themes/mytheme/_static/mytheme.css diff --git a/tests/roots/test-build-html-theme-having-multiple-stylesheets/_themes/mytheme/theme.conf b/tests/roots/test-build-html-theme-having-multiple-stylesheets/_themes/mytheme/theme.conf new file mode 100644 index 0000000..c87296e --- /dev/null +++ b/tests/roots/test-build-html-theme-having-multiple-stylesheets/_themes/mytheme/theme.conf @@ -0,0 +1,3 @@ +[theme] +inherit = basic +stylesheet = mytheme.css, extra.css diff --git a/tests/roots/test-build-html-theme-having-multiple-stylesheets/conf.py b/tests/roots/test-build-html-theme-having-multiple-stylesheets/conf.py new file mode 100644 index 0000000..3cb43d6 --- /dev/null +++ b/tests/roots/test-build-html-theme-having-multiple-stylesheets/conf.py @@ -0,0 +1,2 @@ +html_theme_path = ['_themes'] +html_theme = 'mytheme' diff --git a/tests/roots/test-build-html-theme-having-multiple-stylesheets/index.rst b/tests/roots/test-build-html-theme-having-multiple-stylesheets/index.rst new file mode 100644 index 0000000..b8b81f9 --- /dev/null +++ b/tests/roots/test-build-html-theme-having-multiple-stylesheets/index.rst @@ -0,0 +1,2 @@ +test-build-html-theme-having-multiple-stylesheets +================================================= diff --git a/tests/roots/test-build-html-translator/conf.py b/tests/roots/test-build-html-translator/conf.py new file mode 100644 index 0000000..89448d4 --- /dev/null +++ b/tests/roots/test-build-html-translator/conf.py @@ -0,0 +1,16 @@ +from sphinx.writers.html import HTML5Translator + +project = 'test' + + +class ConfHTMLTranslator(HTML5Translator): + depart_with_node = 0 + + def depart_admonition(self, node=None): + if node is not None: + self.depart_with_node += 1 + HTML5Translator.depart_admonition(self, node) + + +def setup(app): + app.set_translator('html', ConfHTMLTranslator) diff --git a/tests/roots/test-build-html-translator/index.rst b/tests/roots/test-build-html-translator/index.rst new file mode 100644 index 0000000..1610d2b --- /dev/null +++ 
b/tests/roots/test-build-html-translator/index.rst @@ -0,0 +1,24 @@ +======================= +Test HTML admonitions +======================= + +.. seealso:: test + +.. note:: test + +.. warning:: test + +.. attention:: test + +.. caution:: test + +.. danger:: test + +.. error:: test + +.. hint:: test + +.. important:: test + +.. tip:: test + diff --git a/tests/roots/test-build-text/conf.py b/tests/roots/test-build-text/conf.py new file mode 100644 index 0000000..fd9eefb --- /dev/null +++ b/tests/roots/test-build-text/conf.py @@ -0,0 +1,2 @@ +source_suffix = '.txt' +exclude_patterns = ['_build'] diff --git a/tests/roots/test-build-text/doc1.txt b/tests/roots/test-build-text/doc1.txt new file mode 100644 index 0000000..da1909a --- /dev/null +++ b/tests/roots/test-build-text/doc1.txt @@ -0,0 +1,2 @@ +Section A +========= diff --git a/tests/roots/test-build-text/doc2.txt b/tests/roots/test-build-text/doc2.txt new file mode 100644 index 0000000..ebc88e9 --- /dev/null +++ b/tests/roots/test-build-text/doc2.txt @@ -0,0 +1,9 @@ +Section B +========= + +Sub Ba +------ + +Sub Bb +------ + diff --git a/tests/roots/test-build-text/index.txt b/tests/roots/test-build-text/index.txt new file mode 100644 index 0000000..ca9f8dc --- /dev/null +++ b/tests/roots/test-build-text/index.txt @@ -0,0 +1,11 @@ +.. toctree:: + :numbered: + + doc1 + doc2 + maxwidth + lineblock + nonascii_title + nonascii_table + nonascii_maxwidth + table diff --git a/tests/roots/test-build-text/lineblock.txt b/tests/roots/test-build-text/lineblock.txt new file mode 100644 index 0000000..b9cd0ed --- /dev/null +++ b/tests/roots/test-build-text/lineblock.txt @@ -0,0 +1,6 @@ +* one + + | line-block 1 + | line-block 2 + +followed paragraph. diff --git a/tests/roots/test-build-text/listitems.txt b/tests/roots/test-build-text/listitems.txt new file mode 100644 index 0000000..f0952d8 --- /dev/null +++ b/tests/roots/test-build-text/listitems.txt @@ -0,0 +1,4 @@ +.. seealso:: + + * item 1 + * item 2 diff --git a/tests/roots/test-build-text/maxwidth.txt b/tests/roots/test-build-text/maxwidth.txt new file mode 100644 index 0000000..c36f8a0 --- /dev/null +++ b/tests/roots/test-build-text/maxwidth.txt @@ -0,0 +1,6 @@ +.. seealso:: ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham + +* ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham +* ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham + +spam egg diff --git a/tests/roots/test-build-text/nonascii_maxwidth.txt b/tests/roots/test-build-text/nonascii_maxwidth.txt new file mode 100644 index 0000000..e9f0fd9 --- /dev/null +++ b/tests/roots/test-build-text/nonascii_maxwidth.txt @@ -0,0 +1,5 @@ +abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc + +日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 + +abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 diff --git a/tests/roots/test-build-text/nonascii_table.txt b/tests/roots/test-build-text/nonascii_table.txt new file mode 100644 index 0000000..709e0f2 --- /dev/null +++ b/tests/roots/test-build-text/nonascii_table.txt @@ -0,0 +1,7 @@ +.. 
list-table:: + + - - spam + - egg + + - - 日本語 + - 日本語 diff --git a/tests/roots/test-build-text/nonascii_title.txt b/tests/roots/test-build-text/nonascii_title.txt new file mode 100644 index 0000000..6d3b1f6 --- /dev/null +++ b/tests/roots/test-build-text/nonascii_title.txt @@ -0,0 +1,2 @@ +日本語 +====== diff --git a/tests/roots/test-build-text/table.txt b/tests/roots/test-build-text/table.txt new file mode 100644 index 0000000..adc8b37 --- /dev/null +++ b/tests/roots/test-build-text/table.txt @@ -0,0 +1,7 @@ ++-----+-----+ +| XXX | XXX | ++-----+-----+ +| | XXX | ++-----+-----+ +| XXX | | ++-----+-----+ diff --git a/tests/roots/test-build-text/table_colspan.txt b/tests/roots/test-build-text/table_colspan.txt new file mode 100644 index 0000000..4ae6637 --- /dev/null +++ b/tests/roots/test-build-text/table_colspan.txt @@ -0,0 +1,7 @@ ++-----+-----+ +| XXX | XXX | ++-----+-----+ +| | XXX | ++-----+ | +| XXX | | ++-----+-----+ diff --git a/tests/roots/test-build-text/table_colspan_and_rowspan.txt b/tests/roots/test-build-text/table_colspan_and_rowspan.txt new file mode 100644 index 0000000..82d3607 --- /dev/null +++ b/tests/roots/test-build-text/table_colspan_and_rowspan.txt @@ -0,0 +1,7 @@ ++-----------+-----+ +| AAA | BBB | ++-----+-----+ | +| | XXX | | +| +-----+-----+ +| DDD | CCC | ++-----+-----------+ diff --git a/tests/roots/test-build-text/table_colspan_left.txt b/tests/roots/test-build-text/table_colspan_left.txt new file mode 100644 index 0000000..dbfa324 --- /dev/null +++ b/tests/roots/test-build-text/table_colspan_left.txt @@ -0,0 +1,7 @@ ++-----+-----+ +| XXX | XXX | ++-----+-----+ +| | XXX | +| +-----+ +| XXX | | ++-----+-----+ diff --git a/tests/roots/test-build-text/table_rowspan.txt b/tests/roots/test-build-text/table_rowspan.txt new file mode 100644 index 0000000..36c30eb --- /dev/null +++ b/tests/roots/test-build-text/table_rowspan.txt @@ -0,0 +1,7 @@ ++-----+-----+ +| XXXXXXXXX | ++-----+-----+ +| | XXX | ++-----+-----+ +| XXX | | ++-----+-----+ diff --git a/tests/roots/test-builder-dirhtml/bar.rst b/tests/roots/test-builder-dirhtml/bar.rst new file mode 100644 index 0000000..11f287a --- /dev/null +++ b/tests/roots/test-builder-dirhtml/bar.rst @@ -0,0 +1,4 @@ +.. _bar: + +bar +=== diff --git a/tests/roots/test-builder-dirhtml/conf.py b/tests/roots/test-builder-dirhtml/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-builder-dirhtml/conf.py diff --git a/tests/roots/test-builder-dirhtml/foo/foo_1.rst b/tests/roots/test-builder-dirhtml/foo/foo_1.rst new file mode 100644 index 0000000..6db0ea5 --- /dev/null +++ b/tests/roots/test-builder-dirhtml/foo/foo_1.rst @@ -0,0 +1,4 @@ +.. _foo_1: + +foo/foo_1 +========= diff --git a/tests/roots/test-builder-dirhtml/foo/foo_2.rst b/tests/roots/test-builder-dirhtml/foo/foo_2.rst new file mode 100644 index 0000000..fae7f26 --- /dev/null +++ b/tests/roots/test-builder-dirhtml/foo/foo_2.rst @@ -0,0 +1,4 @@ +.. _foo_2: + +foo/foo_2 +========= diff --git a/tests/roots/test-builder-dirhtml/foo/index.rst b/tests/roots/test-builder-dirhtml/foo/index.rst new file mode 100644 index 0000000..92d473c --- /dev/null +++ b/tests/roots/test-builder-dirhtml/foo/index.rst @@ -0,0 +1,9 @@ +.. _foo: + +foo/index +========= + +.. toctree:: + + foo_1 + foo_2 diff --git a/tests/roots/test-builder-dirhtml/index.rst b/tests/roots/test-builder-dirhtml/index.rst new file mode 100644 index 0000000..274e177 --- /dev/null +++ b/tests/roots/test-builder-dirhtml/index.rst @@ -0,0 +1,9 @@ +.. _index: + +index +===== + +.. 
toctree:: + + foo/index + bar diff --git a/tests/roots/test-builder-gettext-dont-rebuild-mo/bom.rst b/tests/roots/test-builder-gettext-dont-rebuild-mo/bom.rst new file mode 100644 index 0000000..3fea824 --- /dev/null +++ b/tests/roots/test-builder-gettext-dont-rebuild-mo/bom.rst @@ -0,0 +1,5 @@ +File with UTF-8 BOM +=================== + +This file has a UTF-8 "BOM". + diff --git a/tests/roots/test-builder-gettext-dont-rebuild-mo/conf.py b/tests/roots/test-builder-gettext-dont-rebuild-mo/conf.py new file mode 100644 index 0000000..d13f727 --- /dev/null +++ b/tests/roots/test-builder-gettext-dont-rebuild-mo/conf.py @@ -0,0 +1 @@ +language = 'xx' diff --git a/tests/roots/test-builder-gettext-dont-rebuild-mo/index.rst b/tests/roots/test-builder-gettext-dont-rebuild-mo/index.rst new file mode 100644 index 0000000..7ff38c5 --- /dev/null +++ b/tests/roots/test-builder-gettext-dont-rebuild-mo/index.rst @@ -0,0 +1,6 @@ +The basic Sphinx documentation for testing +========================================== + +.. toctree:: + + bom diff --git a/tests/roots/test-builder-gettext-dont-rebuild-mo/xx/LC_MESSAGES/bom.po b/tests/roots/test-builder-gettext-dont-rebuild-mo/xx/LC_MESSAGES/bom.po new file mode 100644 index 0000000..c6025eb --- /dev/null +++ b/tests/roots/test-builder-gettext-dont-rebuild-mo/xx/LC_MESSAGES/bom.po @@ -0,0 +1,12 @@ +#, fuzzy +msgid "" +msgstr "" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "File with UTF-8 BOM" +msgstr "Datei mit UTF-8" + +msgid "This file has a UTF-8 \"BOM\"." +msgstr "This file has umlauts: äöü." diff --git a/tests/roots/test-changes/base.rst b/tests/roots/test-changes/base.rst new file mode 100644 index 0000000..a1b2839 --- /dev/null +++ b/tests/roots/test-changes/base.rst @@ -0,0 +1,20 @@ +Version markup +-------------- + +.. versionadded:: 0.6 + Some funny **stuff**. + +.. versionchanged:: 0.6 + Even more funny stuff. + +.. deprecated:: 0.6 + Boring stuff. + +.. versionadded:: 1.2 + + First paragraph of versionadded. + +.. versionchanged:: 1.2 + First paragraph of versionchanged. + + Second paragraph of versionchanged. diff --git a/tests/roots/test-changes/c-api.rst b/tests/roots/test-changes/c-api.rst new file mode 100644 index 0000000..f0ad413 --- /dev/null +++ b/tests/roots/test-changes/c-api.rst @@ -0,0 +1,24 @@ +.. highlight:: c + + +Memory +====== + +.. c:function:: void* Test_Malloc(size_t n) + + Allocate *n* bytes of memory. + + .. versionchanged:: 0.6 + + Can now be replaced with a different allocator. + +System +------ + +Access to the system allocator. + +.. versionadded:: 0.6 + +.. c:function:: void* Test_SysMalloc(size_t n) + + Allocate *n* bytes of memory using system allocator. diff --git a/tests/roots/test-changes/conf.py b/tests/roots/test-changes/conf.py new file mode 100644 index 0000000..c3b2169 --- /dev/null +++ b/tests/roots/test-changes/conf.py @@ -0,0 +1,4 @@ +project = 'Sphinx ChangesBuilder tests' +copyright = '2007-2023 by the Sphinx team, see AUTHORS' +version = '0.6' +release = '0.6alpha1' diff --git a/tests/roots/test-changes/contents.rst b/tests/roots/test-changes/contents.rst new file mode 100644 index 0000000..ced8026 --- /dev/null +++ b/tests/roots/test-changes/contents.rst @@ -0,0 +1,13 @@ +Index for ChangesBuilder tests +============================== + +Contents: + +.. 
toctree:: + :maxdepth: 2 + :caption: Table of Contents + :name: mastertoc + + base + c-api + library/utils diff --git a/tests/roots/test-changes/library/utils.rst b/tests/roots/test-changes/library/utils.rst new file mode 100644 index 0000000..8644699 --- /dev/null +++ b/tests/roots/test-changes/library/utils.rst @@ -0,0 +1,25 @@ +:mod:`utils` --- Fake utilities module for tests +================================================ + +.. module:: utils + :synopsis: Utility functions + +-------------- + +The :mod:`utils` module is a pretend python module for changes testing. + + +Classes +------- + +.. class:: Path + + Class for handling paths. + + .. versionadded:: 0.5 + + Innovative new way to handle paths. + + .. deprecated:: 0.6 + + So, that was a bad idea it turns out. diff --git a/tests/roots/test-circular/conf.py b/tests/roots/test-circular/conf.py new file mode 100644 index 0000000..a45d22e --- /dev/null +++ b/tests/roots/test-circular/conf.py @@ -0,0 +1 @@ +exclude_patterns = ['_build'] diff --git a/tests/roots/test-circular/index.rst b/tests/roots/test-circular/index.rst new file mode 100644 index 0000000..294e674 --- /dev/null +++ b/tests/roots/test-circular/index.rst @@ -0,0 +1,4 @@ +.. toctree:: + + sub + diff --git a/tests/roots/test-circular/sub.rst b/tests/roots/test-circular/sub.rst new file mode 100644 index 0000000..cebfd65 --- /dev/null +++ b/tests/roots/test-circular/sub.rst @@ -0,0 +1,3 @@ +.. toctree:: + + index diff --git a/tests/roots/test-config/conf.py b/tests/roots/test-config/conf.py new file mode 100644 index 0000000..0027d87 --- /dev/null +++ b/tests/roots/test-config/conf.py @@ -0,0 +1,3 @@ +project = 'Sphinx <Tests>' +release = '0.6alpha1' +templates_path = ['_templates'] diff --git a/tests/roots/test-copyright-multiline/conf.py b/tests/roots/test-copyright-multiline/conf.py new file mode 100644 index 0000000..a2b7b68 --- /dev/null +++ b/tests/roots/test-copyright-multiline/conf.py @@ -0,0 +1,9 @@ +copyright = ( + '2006', + '2006-2009, Alice', + '2010-2013, Bob', + '2014-2017, Charlie', + '2018-2021, David', + '2022-2025, Eve', +) +html_theme = 'basic' diff --git a/tests/roots/test-copyright-multiline/index.rst b/tests/roots/test-copyright-multiline/index.rst new file mode 100644 index 0000000..aa32ae6 --- /dev/null +++ b/tests/roots/test-copyright-multiline/index.rst @@ -0,0 +1,3 @@ +======================== +test-copyright-multiline +======================== diff --git a/tests/roots/test-correct-year/conf.py b/tests/roots/test-correct-year/conf.py new file mode 100644 index 0000000..814c08b --- /dev/null +++ b/tests/roots/test-correct-year/conf.py @@ -0,0 +1 @@ +copyright = '2006-2009, Author' diff --git a/tests/roots/test-correct-year/index.rst b/tests/roots/test-correct-year/index.rst new file mode 100644 index 0000000..938dfd5 --- /dev/null +++ b/tests/roots/test-correct-year/index.rst @@ -0,0 +1,4 @@ +================= +test-correct-year +================= + diff --git a/tests/roots/test-default_role/conf.py b/tests/roots/test-default_role/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-default_role/conf.py diff --git a/tests/roots/test-default_role/foo.rst b/tests/roots/test-default_role/foo.rst new file mode 100644 index 0000000..00e5ae5 --- /dev/null +++ b/tests/roots/test-default_role/foo.rst @@ -0,0 +1,4 @@ +foo.rst +======= + +`OK` button diff --git a/tests/roots/test-default_role/index.rst b/tests/roots/test-default_role/index.rst new file mode 100644 index 0000000..34c1855 --- /dev/null +++ 
b/tests/roots/test-default_role/index.rst @@ -0,0 +1,6 @@ +default_role +============ + +.. default-role:: pep + +`8` diff --git a/tests/roots/test-directive-code/caption.rst b/tests/roots/test-directive-code/caption.rst new file mode 100644 index 0000000..77c5c38 --- /dev/null +++ b/tests/roots/test-directive-code/caption.rst @@ -0,0 +1,52 @@ +Caption +======= + +References +---------- + +See :numref:`name *test* rb` and :numref:`name **test** py`. + +See :ref:`Ruby <name *test* rb>` and :ref:`Python <name **test** py>`. + + +Code blocks +----------- + +.. code-block:: ruby + :caption: caption *test* rb + + def ruby? + false + end + + +Literal Include +--------------- + +.. literalinclude:: literal.inc + :language: python + :caption: caption **test** py + :lines: 10-11 + + +Named Code blocks +----------------- + +.. code-block:: ruby + :name: name *test* rb + :caption: caption *test* rbnamed + + def ruby? + false + end + + +Named Literal Include +--------------------- + +.. literalinclude:: literal.inc + :language: python + :name: name **test** py + :caption: caption **test** pynamed + :lines: 10-11 + diff --git a/tests/roots/test-directive-code/classes.rst b/tests/roots/test-directive-code/classes.rst new file mode 100644 index 0000000..e9aa5d9 --- /dev/null +++ b/tests/roots/test-directive-code/classes.rst @@ -0,0 +1,21 @@ +classes +======= + +Code blocks +----------- + +.. code-block:: ruby + :class: foo bar + :name: code_block + + def ruby? + false + end + + +Literal Includes +---------------- + +.. literalinclude:: literal.inc + :class: bar baz + :name: literal_include diff --git a/tests/roots/test-directive-code/conf.py b/tests/roots/test-directive-code/conf.py new file mode 100644 index 0000000..f1e3a2c --- /dev/null +++ b/tests/roots/test-directive-code/conf.py @@ -0,0 +1,2 @@ +exclude_patterns = ['_build'] +numfig = True diff --git a/tests/roots/test-directive-code/dedent.rst b/tests/roots/test-directive-code/dedent.rst new file mode 100644 index 0000000..66ac91c --- /dev/null +++ b/tests/roots/test-directive-code/dedent.rst @@ -0,0 +1,64 @@ +dedent option +------------- + +.. code-block:: + + First line + Second line + Third line + Fourth line + +ReST has no fixed indent and only a change in indentation is significant not the amount [1]_. +Thus, the following code inside the code block is not indent even it looks so with respect to the previous block. + +.. code-block:: + + First line + Second line + Third line + Fourth line + +Having an option "fixates" the indent to be 3 spaces, thus the code inside the code block is indented by 4 spaces. + +.. code-block:: + :class: dummy + + First line + Second line + Third line + Fourth line + +The code has 6 spaces indent, minus 4 spaces dedent should yield a 2 space indented code in the output. + +.. code-block:: + :dedent: 4 + + First line + Second line + Third line + Fourth line + +Dedenting by zero, should not strip any spaces and be a no-op. + +.. note:: + This can be used as an alternative to ``:class: dummy`` above, to fixate the ReST indentation of the block. + +.. code-block:: + :dedent: 0 + + First line + Second line + Third line + Fourth line + +Dedent without argument should autostrip common whitespace at the beginning. + +.. code-block:: + :dedent: + + First line + Second line + Third line + Fourth line + +.. 
[1] https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#indentation diff --git a/tests/roots/test-directive-code/emphasize.rst b/tests/roots/test-directive-code/emphasize.rst new file mode 100644 index 0000000..95db574 --- /dev/null +++ b/tests/roots/test-directive-code/emphasize.rst @@ -0,0 +1,7 @@ +Literal Includes with Highlighted Lines +======================================= + +.. literalinclude:: target.py + :language: python + :emphasize-lines: 5-6, 13-15, 24- + diff --git a/tests/roots/test-directive-code/empty.inc b/tests/roots/test-directive-code/empty.inc new file mode 100644 index 0000000..b28b04f --- /dev/null +++ b/tests/roots/test-directive-code/empty.inc @@ -0,0 +1,3 @@ + + + diff --git a/tests/roots/test-directive-code/error.inc b/tests/roots/test-directive-code/error.inc new file mode 100644 index 0000000..4728280 --- /dev/null +++ b/tests/roots/test-directive-code/error.inc @@ -0,0 +1 @@ +not a python script! diff --git a/tests/roots/test-directive-code/force.rst b/tests/roots/test-directive-code/force.rst new file mode 100644 index 0000000..1834b3a --- /dev/null +++ b/tests/roots/test-directive-code/force.rst @@ -0,0 +1,16 @@ +force option +============ + +.. code:: python + :force: + + not a python script! + +.. code-block:: python + :force: + + not a python script! + +.. literalinclude:: error.inc + :language: python + :force: diff --git a/tests/roots/test-directive-code/highlight.rst b/tests/roots/test-directive-code/highlight.rst new file mode 100644 index 0000000..4191b58 --- /dev/null +++ b/tests/roots/test-directive-code/highlight.rst @@ -0,0 +1,20 @@ +highlight +--------- + +.. code-block:: + + "A code-block without no language" + +.. code-block:: python2 + + "A code-block with language argument" + +.. highlight:: python3 + +.. code-block:: + + "A code-block without no language after highlight directive" + +.. code-block:: python2 + + "A code-block without language argument after highlight directive" diff --git a/tests/roots/test-directive-code/index.rst b/tests/roots/test-directive-code/index.rst new file mode 100644 index 0000000..dab6b70 --- /dev/null +++ b/tests/roots/test-directive-code/index.rst @@ -0,0 +1,25 @@ +test-directive-code +=================== + +.. toctree:: + :glob: + + * + + +Code blocks +----------- + +.. code-block:: ruby + :linenos: + + def ruby? + false + end + + +Literal Includes +---------------- + +.. literalinclude:: literal.inc + :language: python diff --git a/tests/roots/test-directive-code/linenos.rst b/tests/roots/test-directive-code/linenos.rst new file mode 100644 index 0000000..a8e5b69 --- /dev/null +++ b/tests/roots/test-directive-code/linenos.rst @@ -0,0 +1,18 @@ +Literal Includes with Line Numbers +================================== + +.. literalinclude:: literal.inc + :language: python + :linenos: + +.. literalinclude:: literal.inc + :language: python + :lineno-start: 200 + +.. literalinclude:: literal.inc + :language: python + :lines: 5-9 + :lineno-match: + +.. literalinclude:: empty.inc + :lineno-match: diff --git a/tests/roots/test-directive-code/linenothreshold.rst b/tests/roots/test-directive-code/linenothreshold.rst new file mode 100644 index 0000000..09ee67e --- /dev/null +++ b/tests/roots/test-directive-code/linenothreshold.rst @@ -0,0 +1,23 @@ +Code Blocks and Literal Includes with Line Numbers via linenothreshold +====================================================================== + +.. highlight:: python + :linenothreshold: 5 + +.. 
code-block:: + + class Foo: + pass + + class Bar: + def baz(): + pass + +.. code-block:: + + # comment + value = True + +.. literalinclude:: literal.inc + +.. literalinclude:: literal-short.inc diff --git a/tests/roots/test-directive-code/literal-diff.inc b/tests/roots/test-directive-code/literal-diff.inc new file mode 100644 index 0000000..f9c21e3 --- /dev/null +++ b/tests/roots/test-directive-code/literal-diff.inc @@ -0,0 +1,13 @@ +# Literally included file using Python highlighting + +foo = "Including Unicode characters: üöä" + +class Foo: + pass + +class Bar: + def baz(self): + pass + +# comment after Bar class +def bar(): pass diff --git a/tests/roots/test-directive-code/literal-short.inc b/tests/roots/test-directive-code/literal-short.inc new file mode 100644 index 0000000..7a07a3f --- /dev/null +++ b/tests/roots/test-directive-code/literal-short.inc @@ -0,0 +1,3 @@ +# Very small literal include (linenothreshold check) + +value = True diff --git a/tests/roots/test-directive-code/literal.inc b/tests/roots/test-directive-code/literal.inc new file mode 100644 index 0000000..fa8f0ca --- /dev/null +++ b/tests/roots/test-directive-code/literal.inc @@ -0,0 +1,13 @@ +# Literally included file using Python highlighting + +foo = "Including Unicode characters: üöä" + +class Foo: + pass + +class Bar: + def baz(): + pass + +# comment after Bar class definition +def bar(): pass diff --git a/tests/roots/test-directive-code/namedblocks.rst b/tests/roots/test-directive-code/namedblocks.rst new file mode 100644 index 0000000..5779bc9 --- /dev/null +++ b/tests/roots/test-directive-code/namedblocks.rst @@ -0,0 +1,28 @@ +Named Blocks +============ + +References to named blocks +-------------------------- + +See :ref:`the ruby code <some ruby code>` and +also :ref:`the python code <some python code>`. + + +Named Code block +---------------- + +.. code-block:: ruby + :name: some ruby code + + def ruby? + false + end + + +Named Literal Include +--------------------- + +.. literalinclude:: literal.inc + :language: python + :name: some python code + diff --git a/tests/roots/test-directive-code/py-decorators.inc b/tests/roots/test-directive-code/py-decorators.inc new file mode 100644 index 0000000..012d5d5 --- /dev/null +++ b/tests/roots/test-directive-code/py-decorators.inc @@ -0,0 +1,15 @@ +# Literally included file using Python highlighting + +@class_decorator +@other_decorator() +class TheClass(object): + + @method_decorator + @other_decorator() + def the_method(): + pass + +@function_decorator +@other_decorator() +def the_function(): + pass diff --git a/tests/roots/test-directive-code/py-decorators.rst b/tests/roots/test-directive-code/py-decorators.rst new file mode 100644 index 0000000..31417f5 --- /dev/null +++ b/tests/roots/test-directive-code/py-decorators.rst @@ -0,0 +1,17 @@ +py-decorators +============= + +Various decorators +------------------ + +.. literalinclude:: py-decorators.inc + :name: literal_include_pydecorators_1 + :pyobject: TheClass + +.. literalinclude:: py-decorators.inc + :name: literal_include_pydecorators_2 + :pyobject: TheClass.the_method + +.. literalinclude:: py-decorators.inc + :name: literal_include_pydecorators_3 + :pyobject: the_function diff --git a/tests/roots/test-directive-code/python.rst b/tests/roots/test-directive-code/python.rst new file mode 100644 index 0000000..794c190 --- /dev/null +++ b/tests/roots/test-directive-code/python.rst @@ -0,0 +1,13 @@ +===========================
+Literal Includes for python
+===========================
+
+block start with blank or comment
+=================================
+
+.. literalinclude:: target.py
+ :pyobject: block_start_with_comment
+
+.. literalinclude:: target.py
+ :pyobject: block_start_with_blank
+
diff --git a/tests/roots/test-directive-code/target.py b/tests/roots/test-directive-code/target.py new file mode 100644 index 0000000..b95dffb --- /dev/null +++ b/tests/roots/test-directive-code/target.py @@ -0,0 +1,26 @@ +# Literally included file using Python highlighting + +foo = "Including Unicode characters: üöä" + +class Foo: + pass + +class Bar: + def baz(): + pass + +# comment after Bar class definition +def bar(): pass + +def block_start_with_comment(): + # Comment + return 1 + +def block_start_with_blank(): + + return 1 + + +class Qux: + def quux(self): + pass diff --git a/tests/roots/test-directive-csv-table/conf.py b/tests/roots/test-directive-csv-table/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-directive-csv-table/conf.py diff --git a/tests/roots/test-directive-csv-table/example.csv b/tests/roots/test-directive-csv-table/example.csv new file mode 100644 index 0000000..eb039aa --- /dev/null +++ b/tests/roots/test-directive-csv-table/example.csv @@ -0,0 +1 @@ +foo,bar,baz diff --git a/tests/roots/test-directive-csv-table/subdir/example.csv b/tests/roots/test-directive-csv-table/subdir/example.csv new file mode 100644 index 0000000..32fe56f --- /dev/null +++ b/tests/roots/test-directive-csv-table/subdir/example.csv @@ -0,0 +1 @@ +FOO,BAR,BAZ diff --git a/tests/roots/test-directive-include/bar.txt b/tests/roots/test-directive-include/bar.txt new file mode 100644 index 0000000..c0fef4c --- /dev/null +++ b/tests/roots/test-directive-include/bar.txt @@ -0,0 +1 @@ +Text from :file:`bar.txt`. diff --git a/tests/roots/test-directive-include/baz/baz.rst b/tests/roots/test-directive-include/baz/baz.rst new file mode 100644 index 0000000..0b74be0 --- /dev/null +++ b/tests/roots/test-directive-include/baz/baz.rst @@ -0,0 +1,6 @@ +Baz +=== + +.. include:: foo.rst + +Baz was here. diff --git a/tests/roots/test-directive-include/conf.py b/tests/roots/test-directive-include/conf.py new file mode 100644 index 0000000..a476858 --- /dev/null +++ b/tests/roots/test-directive-include/conf.py @@ -0,0 +1,2 @@ +project = 'test-directive-include' +exclude_patterns = ['_build'] diff --git a/tests/roots/test-directive-include/foo.rst b/tests/roots/test-directive-include/foo.rst new file mode 100644 index 0000000..0f82e66 --- /dev/null +++ b/tests/roots/test-directive-include/foo.rst @@ -0,0 +1 @@ +The #magical foo. diff --git a/tests/roots/test-directive-include/text.txt b/tests/roots/test-directive-include/text.txt new file mode 100644 index 0000000..b7ea15d --- /dev/null +++ b/tests/roots/test-directive-include/text.txt @@ -0,0 +1 @@ +This is plain text. diff --git a/tests/roots/test-directive-only/conf.py b/tests/roots/test-directive-only/conf.py new file mode 100644 index 0000000..191d0f5 --- /dev/null +++ b/tests/roots/test-directive-only/conf.py @@ -0,0 +1,2 @@ +project = 'test-directive-only' +exclude_patterns = ['_build'] diff --git a/tests/roots/test-directive-only/index.rst b/tests/roots/test-directive-only/index.rst new file mode 100644 index 0000000..80ec003 --- /dev/null +++ b/tests/roots/test-directive-only/index.rst @@ -0,0 +1,6 @@ +test-directive-only +=================== + +.. toctree:: + + only diff --git a/tests/roots/test-directive-only/only.rst b/tests/roots/test-directive-only/only.rst new file mode 100644 index 0000000..4a3eb48 --- /dev/null +++ b/tests/roots/test-directive-only/only.rst @@ -0,0 +1,203 @@ + +1. Sections in only directives +============================== + +Testing sections in only directives. + +.. 
only:: nonexisting_tag + + Skipped Section + --------------- + Should not be here. + +.. only:: not nonexisting_tag + + 1.1. Section + ------------ + Should be here. + +1.2. Section +------------ + +.. only:: not nonexisting_tag + + 1.2.1. Subsection + ~~~~~~~~~~~~~~~~~ + Should be here. + +.. only:: nonexisting_tag + + Skipped Subsection + ~~~~~~~~~~~~~~~~~~ + Should not be here. + +1.3. Section +------------ + +1.3.1. Subsection +~~~~~~~~~~~~~~~~~ +Should be here. + +1.4. Section +------------ + +.. only:: not nonexisting_tag + + 1.4.1. Subsection + ~~~~~~~~~~~~~~~~~ + Should be here. + +1.5. Section +------------ + +.. only:: not nonexisting_tag + + 1.5.1. Subsection + ~~~~~~~~~~~~~~~~~ + Should be here. + +1.5.2. Subsection +~~~~~~~~~~~~~~~~~ +Should be here. + +1.6. Section +------------ + +1.6.1. Subsection +~~~~~~~~~~~~~~~~~ +Should be here. + +.. only:: not nonexisting_tag + + 1.6.2. Subsection + ~~~~~~~~~~~~~~~~~ + Should be here. + +1.6.3. Subsection +~~~~~~~~~~~~~~~~~ +Should be here. + +1.7. Section +------------ + +1.7.1. Subsection +~~~~~~~~~~~~~~~~~ +Should be here. + +.. only:: not nonexisting_tag + + 1.7.1.1. Subsubsection + ...................... + Should be here. + +1.8. Section +------------ + +1.8.1. Subsection +~~~~~~~~~~~~~~~~~ +Should be here. + +1.8.1.1. Subsubsection +...................... +Should be here. + +.. only:: not nonexisting_tag + + 1.8.1.2. Subsubsection + ...................... + Should be here. + +1.9. Section +------------ + +.. only:: nonexisting_tag + + Skipped Subsection + ~~~~~~~~~~~~~~~~~~ + +1.9.1. Subsection +~~~~~~~~~~~~~~~~~ +Should be here. + +1.9.1.1. Subsubsection +...................... +Should be here. + +.. only:: not nonexisting_tag + + 1.10. Section + ------------- + Should be here. + +1.11. Section +------------- + +Text before subsection 11.1. + +.. only:: not nonexisting_tag + + More text before subsection 11.1. + + 1.11.1. Subsection + ~~~~~~~~~~~~~~~~~~ + Should be here. + +Text after subsection 11.1. + +.. only:: not nonexisting_tag + + 1.12. Section + ------------- + Should be here. + + 1.12.1. Subsection + ~~~~~~~~~~~~~~~~~~ + Should be here. + + 1.13. Section + ------------- + Should be here. + +.. only:: not nonexisting_tag + + 1.14. Section + ------------- + Should be here. + + .. only:: not nonexisting_tag + + 1.14.1. Subsection + ~~~~~~~~~~~~~~~~~~ + Should be here. + + 1.15. Section + ------------- + Should be here. + +.. only:: nonexisting_tag + + Skipped document level heading + ============================== + Should not be here. + +.. only:: not nonexisting_tag + + 2. Included document level heading + ================================== + Should be here. + +3. Document level heading +========================= +Should be here. + +.. only:: nonexisting_tag + + Skipped document level heading + ============================== + Should not be here. + +.. only:: not nonexisting_tag + + 4. Another included document level heading + ========================================== + Should be here. diff --git a/tests/roots/test-directives-raw/conf.py b/tests/roots/test-directives-raw/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-directives-raw/conf.py diff --git a/tests/roots/test-directives-raw/index.rst b/tests/roots/test-directives-raw/index.rst new file mode 100644 index 0000000..401ab73 --- /dev/null +++ b/tests/roots/test-directives-raw/index.rst @@ -0,0 +1,40 @@ +test-directives-raw +=================== + +HTML +---- + +standard +^^^^^^^^ + +.. 
raw:: html + + standalone raw directive (HTML) + +with substitution +^^^^^^^^^^^^^^^^^ + +HTML: abc |HTML_RAW| ghi + +.. |HTML_RAW| raw:: html + + def + +LaTeX +----- + +standard +^^^^^^^^ + +.. raw:: latex + + standalone raw directive (LaTeX) + +with substitution +^^^^^^^^^^^^^^^^^ + +LaTeX: abc |LATEX_RAW| ghi + +.. |LATEX_RAW| raw:: latex + + def diff --git a/tests/roots/test-docutilsconf/conf.py b/tests/roots/test-docutilsconf/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-docutilsconf/conf.py diff --git a/tests/roots/test-docutilsconf/docutils.conf b/tests/roots/test-docutilsconf/docutils.conf new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-docutilsconf/docutils.conf diff --git a/tests/roots/test-docutilsconf/index.rst b/tests/roots/test-docutilsconf/index.rst new file mode 100644 index 0000000..d292e32 --- /dev/null +++ b/tests/roots/test-docutilsconf/index.rst @@ -0,0 +1,6 @@ +test-docutilsconf +================== + +Sphinx [1]_ + +.. [1] Python Documentation Generator diff --git a/tests/roots/test-domain-c-c_maximum_signature_line_length/conf.py b/tests/roots/test-domain-c-c_maximum_signature_line_length/conf.py new file mode 100644 index 0000000..ba480ed --- /dev/null +++ b/tests/roots/test-domain-c-c_maximum_signature_line_length/conf.py @@ -0,0 +1 @@ +c_maximum_signature_line_length = len("str hello(str name)") - 1 diff --git a/tests/roots/test-domain-c-c_maximum_signature_line_length/index.rst b/tests/roots/test-domain-c-c_maximum_signature_line_length/index.rst new file mode 100644 index 0000000..be20940 --- /dev/null +++ b/tests/roots/test-domain-c-c_maximum_signature_line_length/index.rst @@ -0,0 +1,4 @@ +domain-c-c_maximum_signature_line_length +======================================== + +.. c:function:: str hello(str name) diff --git a/tests/roots/test-domain-c-intersphinx/conf.py b/tests/roots/test-domain-c-intersphinx/conf.py new file mode 100644 index 0000000..c176af7 --- /dev/null +++ b/tests/roots/test-domain-c-intersphinx/conf.py @@ -0,0 +1,4 @@ +exclude_patterns = ['_build'] +extensions = [ + 'sphinx.ext.intersphinx', +] diff --git a/tests/roots/test-domain-c-intersphinx/index.rst b/tests/roots/test-domain-c-intersphinx/index.rst new file mode 100644 index 0000000..5d6d3e0 --- /dev/null +++ b/tests/roots/test-domain-c-intersphinx/index.rst @@ -0,0 +1,62 @@ +.. c:member:: void __member = _member + + - :any:`_member` + - :c:member:`_member` + - :c:var:`_member` + - :c:data:`_member` + +.. c:member:: void __var = _var + + - :any:`_var` + - :c:member:`_var` + - :c:var:`_var` + - :c:data:`_var` + +.. c:member:: void __function = _function + + - :any:`_function` + - :c:func:`_function` + - :c:type:`_function` + +.. c:member:: void __macro = _macro + + - :any:`_macro` + - :c:macro:`_macro` + +.. c:type:: _struct __struct + struct _struct __structTagged + + - :any:`_struct` + - :c:struct:`_struct` + - :c:type:`_struct` + +.. c:type:: _union __union + union _union __unionTagged + + - :any:`_union` + - :c:union:`_union` + - :c:type:`_union` + +.. c:type:: _enum __enum + enum _enum __enumTagged + + - :any:`_enum` + - :c:enum:`_enum` + - :c:type:`_enum` + +.. c:member:: void __enumerator = _enumerator + + - :any:`_enumerator` + - :c:enumerator:`_enumerator` + +.. c:type:: _type __type + + - :any:`_type` + - :c:type:`_type` + +.. 
c:member:: void __functionParam = _functionParam.param + + - :any:`_functionParam.param` + - :c:member:`_functionParam.param` + - :c:var:`_functionParam.param` + - :c:data:`_functionParam.param` diff --git a/tests/roots/test-domain-c/anon-dup-decl.rst b/tests/roots/test-domain-c/anon-dup-decl.rst new file mode 100644 index 0000000..743ae2f --- /dev/null +++ b/tests/roots/test-domain-c/anon-dup-decl.rst @@ -0,0 +1,7 @@ +.. c:namespace:: anon_dup_decl_ns + +.. c:struct:: anon_dup_decl + + .. c:struct:: @a.A + .. c:struct:: @b.A + .. c:struct:: A diff --git a/tests/roots/test-domain-c/conf.py b/tests/roots/test-domain-c/conf.py new file mode 100644 index 0000000..a45d22e --- /dev/null +++ b/tests/roots/test-domain-c/conf.py @@ -0,0 +1 @@ +exclude_patterns = ['_build'] diff --git a/tests/roots/test-domain-c/field-role.rst b/tests/roots/test-domain-c/field-role.rst new file mode 100644 index 0000000..5452db5 --- /dev/null +++ b/tests/roots/test-domain-c/field-role.rst @@ -0,0 +1,4 @@ +.. c:function:: void f(int a, int *b) + + :param int a: + :param int* b: diff --git a/tests/roots/test-domain-c/function_param_target.rst b/tests/roots/test-domain-c/function_param_target.rst new file mode 100644 index 0000000..d316d7b --- /dev/null +++ b/tests/roots/test-domain-c/function_param_target.rst @@ -0,0 +1,7 @@ +.. c:namespace:: function_param_target + +.. c:function:: void f(int i) + + - :c:var:`i` + +- :c:var:`f.i` diff --git a/tests/roots/test-domain-c/index.rst b/tests/roots/test-domain-c/index.rst new file mode 100644 index 0000000..4febd63 --- /dev/null +++ b/tests/roots/test-domain-c/index.rst @@ -0,0 +1,54 @@ +.. c:namespace:: index + +test-domain-c +============= + +directives +---------- + +.. c:function:: int hello(const char *name) + + :rtype: int + +.. c:function:: MyStruct hello2(char *name) + + :rtype: MyStruct + +.. c:member:: float Sphinx.version +.. c:var:: int version + +.. c:macro:: IS_SPHINX +.. c:macro:: SPHINX(arg1, arg2) + +.. c:struct:: MyStruct +.. c:union:: MyUnion +.. c:enum:: MyEnum + + .. c:enumerator:: MyEnumerator + + :c:enumerator:`MyEnumerator` + + :c:enumerator:`MyEnumerator` + +:c:enumerator:`MyEnumerator` + +.. c:type:: Sphinx +.. c:type:: int SphinxVersionNum + + +.. c:struct:: A + + .. c:union:: @data + + .. c:member:: int a + +- :c:member:`A.@data.a` +- :c:member:`A.a` + +- :c:expr:`unsigned int` +- :c:texpr:`unsigned int` + +.. c:var:: A a + +- :c:expr:`a->b` +- :c:texpr:`a->b` diff --git a/tests/roots/test-domain-c/namespace.rst b/tests/roots/test-domain-c/namespace.rst new file mode 100644 index 0000000..c220d38 --- /dev/null +++ b/tests/roots/test-domain-c/namespace.rst @@ -0,0 +1,21 @@ +.. c:namespace:: NS + +.. c:var:: int NSVar + +.. c:namespace:: NULL + +.. c:var:: int NULLVar + +.. c:namespace:: NSDummy + +.. c:namespace:: 0 + +.. c:var:: int ZeroVar + +.. c:namespace-push:: NS2.NS3 + +.. c:var:: int NS2NS3Var + +.. c:namespace-pop:: + +.. c:var:: int PopVar diff --git a/tests/roots/test-domain-c/ns_lookup.rst b/tests/roots/test-domain-c/ns_lookup.rst new file mode 100644 index 0000000..87f9d68 --- /dev/null +++ b/tests/roots/test-domain-c/ns_lookup.rst @@ -0,0 +1,13 @@ +.. c:namespace:: ns_lookup + +.. c:var:: int i + +.. 
c:function:: void f(int j) + + - :c:var:`i` + - :c:var:`j` + - :c:expr:`i` + - :c:expr:`j` + +- :c:var:`i` +- :c:expr:`i` diff --git a/tests/roots/test-domain-cpp-cpp_maximum_signature_line_length/conf.py b/tests/roots/test-domain-cpp-cpp_maximum_signature_line_length/conf.py new file mode 100644 index 0000000..1eb3a64 --- /dev/null +++ b/tests/roots/test-domain-cpp-cpp_maximum_signature_line_length/conf.py @@ -0,0 +1 @@ +cpp_maximum_signature_line_length = len("str hello(str name)") - 1 diff --git a/tests/roots/test-domain-cpp-cpp_maximum_signature_line_length/index.rst b/tests/roots/test-domain-cpp-cpp_maximum_signature_line_length/index.rst new file mode 100644 index 0000000..425908c --- /dev/null +++ b/tests/roots/test-domain-cpp-cpp_maximum_signature_line_length/index.rst @@ -0,0 +1,4 @@ +domain-cpp-cpp_maximum_signature_line_length +============================================ + +.. cpp:function:: str hello(str name) diff --git a/tests/roots/test-domain-cpp-intersphinx/conf.py b/tests/roots/test-domain-cpp-intersphinx/conf.py new file mode 100644 index 0000000..c176af7 --- /dev/null +++ b/tests/roots/test-domain-cpp-intersphinx/conf.py @@ -0,0 +1,4 @@ +exclude_patterns = ['_build'] +extensions = [ + 'sphinx.ext.intersphinx', +] diff --git a/tests/roots/test-domain-cpp-intersphinx/index.rst b/tests/roots/test-domain-cpp-intersphinx/index.rst new file mode 100644 index 0000000..9ed9493 --- /dev/null +++ b/tests/roots/test-domain-cpp-intersphinx/index.rst @@ -0,0 +1,112 @@ +.. cpp:type:: _class __class + + - :any:`_class` + - :cpp:any:`_class` + - :cpp:class:`_class` + - :cpp:struct:`_class` + - :cpp:type:`_class` + +.. cpp:type:: _struct __struct + + - :any:`_struct` + - :cpp:any:`_struct` + - :cpp:class:`_struct` + - :cpp:struct:`_struct` + - :cpp:type:`_struct` + +.. cpp:type:: _union __union + + - :any:`_union` + - :cpp:any:`_union` + - :cpp:union:`_union` + - :cpp:type:`_union` + +.. cpp:member:: void __function = _function + + - :any:`_function` + - :cpp:any:`_function` + - :cpp:func:`_function` + - :cpp:type:`_function` + +.. cpp:member:: void __member = _member + + - :any:`_member` + - :cpp:any:`_member` + - :cpp:member:`_member` + - :cpp:var:`_member` + +.. cpp:member:: void __var = _var + + - :any:`_var` + - :cpp:any:`_var` + - :cpp:member:`_var` + - :cpp:var:`_var` + +.. cpp:type:: _type __type + + - :any:`_type` + - :cpp:any:`_type` + - :cpp:type:`_type` + +.. cpp:function:: template<_concept T> void __concept() + + - :any:`_concept` + - :cpp:any:`_concept` + - :cpp:concept:`_concept` + +.. cpp:type:: _enum __enum + + - :any:`_enum` + - :cpp:any:`_enum` + - :cpp:enum:`_enum` + - :cpp:type:`_enum` + +.. cpp:type:: _enumStruct __enumStruct + + - :any:`_enumStruct` + - :cpp:any:`_enumStruct` + - :cpp:enum:`_enumStruct` + - :cpp:type:`_enumStruct` + +.. cpp:type:: _enumClass __enumClass + + - :any:`_enumClass` + - :cpp:any:`_enumClass` + - :cpp:enum:`_enumClass` + - :cpp:type:`_enumClass` + +.. cpp:member:: void __enumerator = _enumerator + + - :any:`_enumerator` + - :cpp:any:`_enumerator` + - :cpp:enumerator:`_enumerator` + +.. cpp:member:: void __scopedEnumerator = _enumStruct::_scopedEnumerator + + - :any:`_enumStruct::_scopedEnumerator` + - :cpp:any:`_enumStruct::_scopedEnumerator` + - :cpp:enumerator:`_enumStruct::_scopedEnumerator` + +.. cpp:member:: void __enumerator2 = _enum::_enumerator + + - :any:`_enum::_enumerator` + - :cpp:any:`_enum::_enumerator` + - :cpp:enumerator:`_enum::_enumerator` + +.. 
cpp:member:: void __functionParam = _functionParam::param + + - :any:`_functionParam::param` + - :cpp:any:`_functionParam::param` + - :cpp:member:`_functionParam::param` + - :cpp:var:`_functionParam::param` + +.. cpp:type:: _templateParam::TParam __templateParam + + - :any:`_templateParam::TParam` + - :cpp:any:`_templateParam::TParam` + - :cpp:type:`_templateParam::TParam` + - :cpp:member:`_templateParam::TParam` + - :cpp:var:`_templateParam::TParam` + - :cpp:class:`_templateParam::TParam` + - :cpp:struct:`_templateParam::TParam` + - :cpp:union:`_templateParam::TParam` diff --git a/tests/roots/test-domain-cpp/anon-dup-decl.rst b/tests/roots/test-domain-cpp/anon-dup-decl.rst new file mode 100644 index 0000000..89a9c95 --- /dev/null +++ b/tests/roots/test-domain-cpp/anon-dup-decl.rst @@ -0,0 +1,4 @@ +.. cpp:namespace:: anon_dup_decl +.. cpp:class:: @a::A +.. cpp:class:: @b::A +.. cpp:class:: A diff --git a/tests/roots/test-domain-cpp/any-role.rst b/tests/roots/test-domain-cpp/any-role.rst new file mode 100644 index 0000000..24b415e --- /dev/null +++ b/tests/roots/test-domain-cpp/any-role.rst @@ -0,0 +1,17 @@ +any role +-------- + +* :cpp:any:`Sphinx` +* :cpp:any:`Sphinx::version` +* :cpp:any:`version` +* :cpp:any:`List` +* :cpp:any:`MyEnum` + +* ref function without parens :cpp:any:`paren_1` +* ref function with parens :cpp:any:`paren_2()` +* ref function without parens, explicit title :cpp:any:`paren_3_title <paren_3>` +* ref function with parens, explicit title :cpp:any:`paren_4_title <paren_4()>` +* ref op call without parens :cpp:any:`paren_5::operator()` +* ref op call with parens :cpp:any:`paren_6::operator()()` +* ref op call without parens, explicit title :cpp:any:`paren_7_title <paren_7::operator()>` +* ref op call with parens, explicit title :cpp:any:`paren_8_title <paren_8::operator()()>` diff --git a/tests/roots/test-domain-cpp/backslash.rst b/tests/roots/test-domain-cpp/backslash.rst new file mode 100644 index 0000000..c93e68e --- /dev/null +++ b/tests/roots/test-domain-cpp/backslash.rst @@ -0,0 +1 @@ +.. cpp:var:: char c = '\\' diff --git a/tests/roots/test-domain-cpp/conf.py b/tests/roots/test-domain-cpp/conf.py new file mode 100644 index 0000000..a45d22e --- /dev/null +++ b/tests/roots/test-domain-cpp/conf.py @@ -0,0 +1 @@ +exclude_patterns = ['_build'] diff --git a/tests/roots/test-domain-cpp/field-role.rst b/tests/roots/test-domain-cpp/field-role.rst new file mode 100644 index 0000000..1711a88 --- /dev/null +++ b/tests/roots/test-domain-cpp/field-role.rst @@ -0,0 +1,5 @@ +.. cpp:function:: void f() + + :throws int: + :throws int*: + diff --git a/tests/roots/test-domain-cpp/index.rst b/tests/roots/test-domain-cpp/index.rst new file mode 100644 index 0000000..2df5ec8 --- /dev/null +++ b/tests/roots/test-domain-cpp/index.rst @@ -0,0 +1,53 @@ +test-domain-cpp +=============== + +directives +---------- + +.. cpp:class:: public Sphinx + + The description of Sphinx class. + +.. cpp:function:: int hello(char *name) + + The description of hello function. + +.. cpp:member:: float Sphinx::version + + The description of Sphinx::version. + +.. cpp:var:: int version + + The description of version. + +.. cpp:type:: std::vector<int> List + + The description of List type. + +.. cpp:enum:: MyEnum + + An unscoped enum. + + .. cpp:enumerator:: A + +.. cpp:enum-class:: MyScopedEnum + + A scoped enum. + + .. cpp:enumerator:: B + +.. 
cpp:enum-struct:: protected MyScopedVisibilityEnum : std::underlying_type<MySpecificEnum>::type + + A scoped enum with non-default visibility, and with a specified underlying type. + + .. cpp:enumerator:: B + + +.. cpp:function:: void paren_1(int, float) +.. cpp:function:: void paren_2(int, float) +.. cpp:function:: void paren_3(int, float) +.. cpp:function:: void paren_4(int, float) +.. cpp:function:: void paren_5::operator()(int) +.. cpp:function:: void paren_6::operator()(int) +.. cpp:function:: void paren_7::operator()(int) +.. cpp:function:: void paren_8::operator()(int) diff --git a/tests/roots/test-domain-cpp/lookup-key-overload.rst b/tests/roots/test-domain-cpp/lookup-key-overload.rst new file mode 100644 index 0000000..2011e26 --- /dev/null +++ b/tests/roots/test-domain-cpp/lookup-key-overload.rst @@ -0,0 +1,8 @@ +.. default-domain:: cpp + +.. namespace:: lookup_key_overload + +.. function:: void g(int a) +.. function:: void g(double b) + + :var:`b` diff --git a/tests/roots/test-domain-cpp/multi-decl-lookup.rst b/tests/roots/test-domain-cpp/multi-decl-lookup.rst new file mode 100644 index 0000000..9706d18 --- /dev/null +++ b/tests/roots/test-domain-cpp/multi-decl-lookup.rst @@ -0,0 +1,24 @@ +.. default-domain:: cpp + +.. namespace:: multi_decl_lookup + +.. function:: void f1(int a) + void f1(double b) + + - a: :var:`a` + - b: :var:`b` + +.. function:: template<typename T> void f2(int a) + template<typename U> void f2(double b) + + - T: :type:`T` + - U: :type:`U` + + +.. class:: template<typename T> A + template<typename U> B + + .. function:: void f3() + + - T: :type:`T` + - U: :type:`U` diff --git a/tests/roots/test-domain-cpp/roles-targets-ok.rst b/tests/roots/test-domain-cpp/roles-targets-ok.rst new file mode 100644 index 0000000..783f7b9 --- /dev/null +++ b/tests/roots/test-domain-cpp/roles-targets-ok.rst @@ -0,0 +1,170 @@ +.. default-domain:: cpp + +.. namespace:: RolesTargetsOk + +.. class:: Class + + :cpp:any:`Class` + :class:`Class` + :struct:`Class` + union + func + member + var + :type:`Class` + concept + enum + enumerator + +.. union:: Union + + :cpp:any:`Union` + class + struct + :union:`Union` + func + member + var + :type:`Union` + concept + enum + enumerator + +.. function:: void Function() + + :cpp:any:`Function` + class + struct + union + :func:`Function` + member + var + :type:`Function` + concept + enum + enumerator + +.. var:: int Variable + + :cpp:any:`Variable` + class + struct + union + function + :member:`Variable` + :var:`Variables` + type + concept + enum + enumerator + +.. type:: Type = void + + :cpp:any:`Type` + class + struct + union + function + member + var + :type:`Type` + concept + enum + enumerator + +.. concept:: template<typename T> Concept + + :cpp:any:`Concept` + class + struct + union + function + member + var + type + :concept:`Concept` + enum + enumerator + +.. enum-struct:: Enum + + :cpp:any:`Enum` + class + struct + union + function + member + var + :type:`Enum` + concept + :enum:`Enum` + enumerator + + .. enumerator:: Enumerator + + :cpp:any:`Enumerator` + class + struct + union + function + member + var + type + concept + enum + :enumerator:`Enumerator` + +.. 
class:: template<typename TParamType, \ + int TParamVar, \ + template<typename> typename TParamTemplate \ + > ClassTemplate + + :cpp:any:`TParamType` + :class:`TParamType` + :struct:`TParamType` + :union:`TParamType` + function + :member:`TParamType` + :var:`TParamType` + :type:`TParamType` + concept + enum + enumerator + + :cpp:any:`TParamVar` + :class:`TParamVar` + :struct:`TParamVar` + :union:`TParamVar` + function + :member:`TParamVar` + :var:`TParamVar` + :type:`TParamVar` + concept + enum + enumerator + + :cpp:any:`TParamTemplate` + :class:`TParamTemplate` + :struct:`TParamTemplate` + :union:`TParamTemplate` + function + :member:`TParamTemplate` + :var:`TParamTemplate` + :type:`TParamTemplate` + concept + enum + enumerator + +.. function:: void FunctionParams(int FunctionParam) + + :cpp:any:`FunctionParam` + class + struct + union + function + :member:`FunctionParam` + :var:`FunctionParam` + type + concept + enum + enumerator diff --git a/tests/roots/test-domain-cpp/roles-targets-warn.rst b/tests/roots/test-domain-cpp/roles-targets-warn.rst new file mode 100644 index 0000000..57083ff --- /dev/null +++ b/tests/roots/test-domain-cpp/roles-targets-warn.rst @@ -0,0 +1,158 @@ +.. default-domain:: cpp + +.. namespace:: RolesTargetsWarn + +.. class:: Class + + class + struct + :union:`Class` + :func:`Class` + :member:`Class` + :var:`Class` + type + :concept:`Class` + :enum:`Class` + :enumerator:`Class` + +.. union:: Union + + :class:`Union` + :struct:`Union` + union + :func:`Union` + :member:`Union` + :var:`Union` + type + :concept:`Union` + :enum:`Union` + :enumerator:`Union` + +.. function:: void Function() + + :class:`Function` + :struct:`Function` + :union:`Function` + func + :member:`Function` + :var:`Function` + type + :concept:`Function` + :enum:`Function` + :enumerator:`Function` + +.. var:: int Variable + + :class:`Variable` + :struct:`Variable` + :union:`Variable` + :func:`Variable` + member + var + :type:`Variable` + :concept:`Variable` + :enum:`Variable` + :enumerator:`Variable` + +.. type:: Type = void + + :class:`Type` + :struct:`Type` + :union:`Type` + :func:`Type` + :member:`Type` + :var:`Type` + type + :concept:`Type` + :enum:`Type` + :enumerator:`Type` + +.. concept:: template<typename T> Concept + + :class:`Concept` + :struct:`Concept` + :union:`Concept` + :func:`Concept` + :member:`Concept` + :var:`Concept` + :type:`Concept` + concept + :enum:`Concept` + :enumerator:`Concept` + +.. enum-struct:: Enum + + :class:`Enum` + :struct:`Enum` + :union:`Enum` + :func:`Enum` + :member:`Enum` + :var:`Enum` + type + :concept:`Enum` + enum + :enumerator:`Enum` + + .. enumerator:: Enumerator + + :class:`Enumerator` + :struct:`Enumerator` + :union:`Enumerator` + :func:`Enumerator` + :member:`Enumerator` + :var:`Enumerator` + :type:`Enumerator` + :concept:`Enumerator` + :enum:`Enumerator` + enumerator + +.. class:: template<typename TParamType, \ + int TParamVar, \ + template<typename> typename TParamTemplate \ + > ClassTemplate + + class + struct + union + :func:`TParamType` + member + var + type + :concept:`TParamType` + :enum:`TParamType` + :enumerator:`TParamType` + + class + struct + union + :func:`TParamVar` + member + var + type + :concept:`TParamVar` + :enum:`TParamVar` + :enumerator:`TParamVar` + + class + struct + union + :func:`TParamTemplate` + member + var + type + :concept:`TParamTemplate` + :enum:`TParamTemplate` + :enumerator:`TParamTemplate` + +.. 
function:: void FunctionParams(int FunctionParam) + + :class:`FunctionParam` + :struct:`FunctionParam` + :union:`FunctionParam` + :func:`FunctionParam` + member + var + :type:`FunctionParam` + :concept:`FunctionParam` + :enum:`FunctionParam` + :enumerator:`FunctionParam` diff --git a/tests/roots/test-domain-cpp/roles.rst b/tests/roots/test-domain-cpp/roles.rst new file mode 100644 index 0000000..afd2ede --- /dev/null +++ b/tests/roots/test-domain-cpp/roles.rst @@ -0,0 +1,17 @@ +roles +----- + +* :cpp:class:`Sphinx` +* :cpp:member:`Sphinx::version` +* :cpp:var:`version` +* :cpp:type:`List` +* :cpp:enum:`MyEnum` + +* ref function without parens :cpp:func:`paren_1` +* ref function with parens :cpp:func:`paren_2()` +* ref function without parens, explicit title :cpp:func:`paren_3_title <paren_3>` +* ref function with parens, explicit title :cpp:func:`paren_4_title <paren_4()>` +* ref op call without parens :cpp:func:`paren_5::operator()` +* ref op call with parens :cpp:func:`paren_6::operator()()` +* ref op call without parens, explicit title :cpp:func:`paren_7_title <paren_7::operator()>` +* ref op call with parens, explicit title :cpp:func:`paren_8_title <paren_8::operator()()>` diff --git a/tests/roots/test-domain-cpp/roles2.rst b/tests/roots/test-domain-cpp/roles2.rst new file mode 100644 index 0000000..644b827 --- /dev/null +++ b/tests/roots/test-domain-cpp/roles2.rst @@ -0,0 +1,5 @@ +Check that we don't crash just because we misuse a role. + +.. cpp:class:: A + +:cpp:func:`A` diff --git a/tests/roots/test-domain-cpp/semicolon.rst b/tests/roots/test-domain-cpp/semicolon.rst new file mode 100644 index 0000000..e6b370e --- /dev/null +++ b/tests/roots/test-domain-cpp/semicolon.rst @@ -0,0 +1,14 @@ +.. cpp:class:: Class; +.. cpp:struct:: Struct; +.. cpp:union:: Union; +.. cpp:function:: void f(); +.. cpp:member:: int member; +.. cpp:var:: int var; +.. cpp:type:: Type; +.. cpp:type:: int TypeDef; +.. cpp:type:: Alias = int; +.. cpp:concept:: template<typename T> Concept; +.. cpp:enum:: Enum; +.. cpp:enum-struct:: EnumStruct; +.. cpp:enum-class:: EnumClass; +.. cpp:enumerator:: Enumerator; diff --git a/tests/roots/test-domain-cpp/warn-template-param-qualified-name.rst b/tests/roots/test-domain-cpp/warn-template-param-qualified-name.rst new file mode 100644 index 0000000..49a650d --- /dev/null +++ b/tests/roots/test-domain-cpp/warn-template-param-qualified-name.rst @@ -0,0 +1,11 @@ +.. default-domain:: cpp + +.. class:: template<typename T> A + + .. type:: N1 = T::typeOk + + - Not ok, warn: :type:`T::typeWarn` + + .. type:: N2 = T::U::typeOk + + - Not ok, warn: :type:`T::U::typeWarn` diff --git a/tests/roots/test-domain-cpp/xref_consistency.rst b/tests/roots/test-domain-cpp/xref_consistency.rst new file mode 100644 index 0000000..cb33000 --- /dev/null +++ b/tests/roots/test-domain-cpp/xref_consistency.rst @@ -0,0 +1,12 @@ +xref consistency +---------------- + +.. cpp:namespace:: xref_consistency + +.. 
cpp:class:: item + +code-role: :code:`item` +any-role: :any:`item` +cpp-any-role: :cpp:any:`item` +cpp-expr-role: :cpp:expr:`item` +cpp-texpr-role: :cpp:texpr:`item` diff --git a/tests/roots/test-domain-js-javascript_maximum_signature_line_length/conf.py b/tests/roots/test-domain-js-javascript_maximum_signature_line_length/conf.py new file mode 100644 index 0000000..d7c9331 --- /dev/null +++ b/tests/roots/test-domain-js-javascript_maximum_signature_line_length/conf.py @@ -0,0 +1 @@ +javascript_maximum_signature_line_length = 1 diff --git a/tests/roots/test-domain-js-javascript_maximum_signature_line_length/index.rst b/tests/roots/test-domain-js-javascript_maximum_signature_line_length/index.rst new file mode 100644 index 0000000..b79fc1a --- /dev/null +++ b/tests/roots/test-domain-js-javascript_maximum_signature_line_length/index.rst @@ -0,0 +1,6 @@ +domain-js-maximum_signature_line_length +======================================= + +.. js:function:: hello(name) + +.. js:function:: foo([a, [b, ]]c, d[, e, f]) diff --git a/tests/roots/test-domain-js/conf.py b/tests/roots/test-domain-js/conf.py new file mode 100644 index 0000000..a45d22e --- /dev/null +++ b/tests/roots/test-domain-js/conf.py @@ -0,0 +1 @@ +exclude_patterns = ['_build'] diff --git a/tests/roots/test-domain-js/index.rst b/tests/roots/test-domain-js/index.rst new file mode 100644 index 0000000..fb6b8c3 --- /dev/null +++ b/tests/roots/test-domain-js/index.rst @@ -0,0 +1,7 @@ +test-domain-js +============== + +.. toctree:: + + roles + module diff --git a/tests/roots/test-domain-js/module.rst b/tests/roots/test-domain-js/module.rst new file mode 100644 index 0000000..1fe6a21 --- /dev/null +++ b/tests/roots/test-domain-js/module.rst @@ -0,0 +1,27 @@ +module +======= + +.. js:module:: module_a.submodule + +* Link to :js:class:`ModTopLevel` + +.. js:class:: ModTopLevel + + * Link to :js:meth:`mod_child_1` + * Link to :js:meth:`ModTopLevel.mod_child_1` + +.. js:method:: ModTopLevel.mod_child_1 + + * Link to :js:meth:`mod_child_2` + +.. js:method:: ModTopLevel.mod_child_2 + + * Link to :js:meth:`module_a.submodule.ModTopLevel.mod_child_1` + +.. js:module:: module_b.submodule + +* Link to :js:class:`ModTopLevel` + +.. js:class:: ModTopLevel + + * Link to :js:mod:`module_a.submodule` diff --git a/tests/roots/test-domain-js/roles.rst b/tests/roots/test-domain-js/roles.rst new file mode 100644 index 0000000..4b6acf1 --- /dev/null +++ b/tests/roots/test-domain-js/roles.rst @@ -0,0 +1,48 @@ +roles +===== + +.. js:class:: TopLevel + +.. js:function:: top_level + +* :js:class:`TopLevel` +* :js:func:`top_level` + + +.. js:class:: NestedParentA + + * Link to :js:func:`child_1` + + .. js:function:: child_1() + + * Link to :js:func:`NestedChildA.subchild_2` + * Link to :js:func:`child_2` + * Link to :any:`any_child` + + .. js:function:: any_child() + + * Link to :js:class:`NestedChildA` + + .. js:class:: NestedChildA + + .. js:function:: subchild_1() + + * Link to :js:func:`subchild_2` + + .. js:function:: subchild_2() + + Link to :js:func:`NestedParentA.child_1` + + .. js:function:: child_2() + + Link to :js:func:`NestedChildA.subchild_1` + +.. js:class:: NestedParentB + + * Link to :js:func:`child_1` + + .. 
js:function:: child_1() + + * Link to :js:class:`NestedParentB` + +* :js:class:`NestedParentA.NestedChildA` diff --git a/tests/roots/test-domain-py-python_maximum_signature_line_length/conf.py b/tests/roots/test-domain-py-python_maximum_signature_line_length/conf.py new file mode 100644 index 0000000..45f620d --- /dev/null +++ b/tests/roots/test-domain-py-python_maximum_signature_line_length/conf.py @@ -0,0 +1 @@ +python_maximum_signature_line_length = 1 diff --git a/tests/roots/test-domain-py-python_maximum_signature_line_length/index.rst b/tests/roots/test-domain-py-python_maximum_signature_line_length/index.rst new file mode 100644 index 0000000..75e4683 --- /dev/null +++ b/tests/roots/test-domain-py-python_maximum_signature_line_length/index.rst @@ -0,0 +1,6 @@ +domain-py-maximum_signature_line_length +======================================= + +.. py:function:: hello(name: str) -> str + +.. py:function:: foo([a, [b, ]]c, d[, e, f]) diff --git a/tests/roots/test-domain-py-python_use_unqualified_type_names/conf.py b/tests/roots/test-domain-py-python_use_unqualified_type_names/conf.py new file mode 100644 index 0000000..c81bfe4 --- /dev/null +++ b/tests/roots/test-domain-py-python_use_unqualified_type_names/conf.py @@ -0,0 +1 @@ +python_use_unqualified_type_names = True diff --git a/tests/roots/test-domain-py-python_use_unqualified_type_names/index.rst b/tests/roots/test-domain-py-python_use_unqualified_type_names/index.rst new file mode 100644 index 0000000..a6850a0 --- /dev/null +++ b/tests/roots/test-domain-py-python_use_unqualified_type_names/index.rst @@ -0,0 +1,12 @@ +domain-py-smart_reference +========================= + +.. py:class:: Name + :module: foo + + :param name: blah blah + :type name: foo.Name + :param age: blah blah + :type age: foo.Age + +.. py:function:: hello(name: foo.Name, age: foo.Age) diff --git a/tests/roots/test-domain-py-xref-warning/conf.py b/tests/roots/test-domain-py-xref-warning/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-domain-py-xref-warning/conf.py diff --git a/tests/roots/test-domain-py-xref-warning/index.rst b/tests/roots/test-domain-py-xref-warning/index.rst new file mode 100644 index 0000000..6f2cab7 --- /dev/null +++ b/tests/roots/test-domain-py-xref-warning/index.rst @@ -0,0 +1,7 @@ +test-domain-py-xref-warning +=========================== + +.. _existing-label: + +:ref:`no-label` +:ref:`existing-label` diff --git a/tests/roots/test-domain-py/abbr.rst b/tests/roots/test-domain-py/abbr.rst new file mode 100644 index 0000000..67f1157 --- /dev/null +++ b/tests/roots/test-domain-py/abbr.rst @@ -0,0 +1,10 @@ +abbrev +====== + +.. currentmodule:: module_a.submodule + +* normal: :py:meth:`module_a.submodule.ModTopLevel.mod_child_1` +* relative: :py:meth:`.ModTopLevel.mod_child_1` +* short name: :py:meth:`~module_a.submodule.ModTopLevel.mod_child_1` +* relative + short name: :py:meth:`~.ModTopLevel.mod_child_1` +* short name + relative: :py:meth:`~.ModTopLevel.mod_child_1` diff --git a/tests/roots/test-domain-py/canonical.rst b/tests/roots/test-domain-py/canonical.rst new file mode 100644 index 0000000..34becfd --- /dev/null +++ b/tests/roots/test-domain-py/canonical.rst @@ -0,0 +1,12 @@ +caninical +========= + +:py:class:`.Foo` +:any:`Foo` +:any:`module.Foo` +:any:`original.module.Foo` + +.. py:module:: canonical + +.. 
py:class:: Foo + :canonical: original.module.Foo diff --git a/tests/roots/test-domain-py/conf.py b/tests/roots/test-domain-py/conf.py new file mode 100644 index 0000000..a45d22e --- /dev/null +++ b/tests/roots/test-domain-py/conf.py @@ -0,0 +1 @@ +exclude_patterns = ['_build'] diff --git a/tests/roots/test-domain-py/index.rst b/tests/roots/test-domain-py/index.rst new file mode 100644 index 0000000..b24bbea --- /dev/null +++ b/tests/roots/test-domain-py/index.rst @@ -0,0 +1,10 @@ +test-domain-py +============== + +.. toctree:: + + roles + module + module_option + abbr + canonical diff --git a/tests/roots/test-domain-py/module.rst b/tests/roots/test-domain-py/module.rst new file mode 100644 index 0000000..4a28068 --- /dev/null +++ b/tests/roots/test-domain-py/module.rst @@ -0,0 +1,60 @@ +module +====== + +.. py:module:: module_a.submodule + +* Link to :py:class:`ModTopLevel` + +.. py:class:: ModTopLevel + + * Link to :py:meth:`mod_child_1` + * Link to :py:meth:`ModTopLevel.mod_child_1` + +.. py:method:: ModTopLevel.mod_child_1 + + * Link to :py:meth:`mod_child_2` + +.. py:method:: ModTopLevel.mod_child_2 + + * Link to :py:meth:`module_a.submodule.ModTopLevel.mod_child_1` + +.. py:property:: ModTopLevel.prop + + * Link to :py:attr:`prop attribute <.prop>` + * Link to :py:meth:`prop method <.prop>` + +.. py:currentmodule:: None + +.. py:class:: ModNoModule + +.. py:module:: module_b.submodule + +* Link to :py:class:`ModTopLevel` + +.. py:class:: ModTopLevel + + * Link to :py:class:`ModNoModule` + +.. py:function:: foo(x, y) + + :param x: param x + :type x: int + :param y: param y + :type y: tuple(str, float) + :rtype: list + +.. py:attribute:: attr1 + + :type: ModTopLevel + +.. py:attribute:: attr2 + + :type: :doc:`index` + +.. py:module:: exceptions + +.. py:exception:: Exception + +.. py:module:: object + +.. py:function:: sum() diff --git a/tests/roots/test-domain-py/module_option.rst b/tests/roots/test-domain-py/module_option.rst new file mode 100644 index 0000000..1dec2ce --- /dev/null +++ b/tests/roots/test-domain-py/module_option.rst @@ -0,0 +1,25 @@ +module_option +============= + +.. py:class:: B + :module: test.extra + + This is also a test. + + + .. py:method:: B.baz() + :module: test.extra + + Does something similar to :meth:`foo`. + + + .. py:method:: B.foo() + :module: test.extra + + Does something. + + + .. py:method:: B.test() + :module: test.extra + + Does something completely unrelated to :meth:`foo` diff --git a/tests/roots/test-domain-py/roles.rst b/tests/roots/test-domain-py/roles.rst new file mode 100644 index 0000000..6bff2d2 --- /dev/null +++ b/tests/roots/test-domain-py/roles.rst @@ -0,0 +1,48 @@ +roles +===== + +.. py:class:: TopLevel + +.. py:method:: top_level + +* :py:class:`TopLevel` +* :py:meth:`top_level` + + +.. py:class:: NestedParentA + + * Link to :py:meth:`child_1` + + .. py:method:: child_1() + + * Link to :py:meth:`NestedChildA.subchild_2` + * Link to :py:meth:`child_2` + * Link to :any:`any_child` + + .. py:method:: any_child() + + * Link to :py:class:`NestedChildA` + + .. py:class:: NestedChildA + + .. py:method:: subchild_1() + + * Link to :py:meth:`subchild_2` + + .. py:method:: subchild_2() + + Link to :py:meth:`NestedParentA.child_1` + + .. py:method:: child_2() + + Link to :py:meth:`NestedChildA.subchild_1` + +.. py:class:: NestedParentB + + * Link to :py:meth:`child_1` + + .. 
py:method:: child_1() + + * Link to :py:class:`NestedParentB` + +* :py:class:`NestedParentA.NestedChildA` diff --git a/tests/roots/test-double-inheriting-theme/base_themes_dir/base_theme1/theme.conf b/tests/roots/test-double-inheriting-theme/base_themes_dir/base_theme1/theme.conf new file mode 100644 index 0000000..89e03bb --- /dev/null +++ b/tests/roots/test-double-inheriting-theme/base_themes_dir/base_theme1/theme.conf @@ -0,0 +1,2 @@ +[theme] +inherit = basic diff --git a/tests/roots/test-double-inheriting-theme/base_themes_dir/base_theme2/theme.conf b/tests/roots/test-double-inheriting-theme/base_themes_dir/base_theme2/theme.conf new file mode 100644 index 0000000..a68c018 --- /dev/null +++ b/tests/roots/test-double-inheriting-theme/base_themes_dir/base_theme2/theme.conf @@ -0,0 +1,2 @@ +[theme] +inherit = base_theme1 diff --git a/tests/roots/test-double-inheriting-theme/conf.py b/tests/roots/test-double-inheriting-theme/conf.py new file mode 100644 index 0000000..3667b02 --- /dev/null +++ b/tests/roots/test-double-inheriting-theme/conf.py @@ -0,0 +1,4 @@ +templates_path = ['_templates'] +html_theme = 'base_theme2' +html_theme_path = ['base_themes_dir'] +exclude_patterns = ['_build'] diff --git a/tests/roots/test-double-inheriting-theme/index.rst b/tests/roots/test-double-inheriting-theme/index.rst new file mode 100644 index 0000000..e67f7ff --- /dev/null +++ b/tests/roots/test-double-inheriting-theme/index.rst @@ -0,0 +1,3 @@ +============================ +Test double inheriting theme +============================ diff --git a/tests/roots/test-environment-record-dependencies/api.rst b/tests/roots/test-environment-record-dependencies/api.rst new file mode 100644 index 0000000..acfb896 --- /dev/null +++ b/tests/roots/test-environment-record-dependencies/api.rst @@ -0,0 +1,4 @@ +API +=== + +.. automodule:: example_module diff --git a/tests/roots/test-environment-record-dependencies/conf.py b/tests/roots/test-environment-record-dependencies/conf.py new file mode 100644 index 0000000..107480e --- /dev/null +++ b/tests/roots/test-environment-record-dependencies/conf.py @@ -0,0 +1,5 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) +extensions = ['sphinx.ext.autodoc'] diff --git a/tests/roots/test-environment-record-dependencies/example_module.py b/tests/roots/test-environment-record-dependencies/example_module.py new file mode 100644 index 0000000..d12dc74 --- /dev/null +++ b/tests/roots/test-environment-record-dependencies/example_module.py @@ -0,0 +1,2 @@ +def example_function(): + return 42 diff --git a/tests/roots/test-environment-record-dependencies/index.rst b/tests/roots/test-environment-record-dependencies/index.rst new file mode 100644 index 0000000..21d88a0 --- /dev/null +++ b/tests/roots/test-environment-record-dependencies/index.rst @@ -0,0 +1,3 @@ +.. toctree:: + + api diff --git a/tests/roots/test-epub-anchor-id/conf.py b/tests/roots/test-epub-anchor-id/conf.py new file mode 100644 index 0000000..2a56f1f --- /dev/null +++ b/tests/roots/test-epub-anchor-id/conf.py @@ -0,0 +1,2 @@ +def setup(app): + app.add_crossref_type(directivename="setting", rolename="setting") diff --git a/tests/roots/test-epub-anchor-id/index.rst b/tests/roots/test-epub-anchor-id/index.rst new file mode 100644 index 0000000..75e3c60 --- /dev/null +++ b/tests/roots/test-epub-anchor-id/index.rst @@ -0,0 +1,13 @@ +test-epub-anchor-id +=================== + +.. setting:: STATICFILES_FINDERS + +blah blah blah + +.. 
setting:: STATICFILES_SECTION + +blah blah blah +============== + +see :setting:`STATICFILES_FINDERS` diff --git a/tests/roots/test-ext-autodoc/autodoc_dummy_bar.py b/tests/roots/test-ext-autodoc/autodoc_dummy_bar.py new file mode 100644 index 0000000..3b5bbfd --- /dev/null +++ b/tests/roots/test-ext-autodoc/autodoc_dummy_bar.py @@ -0,0 +1,6 @@ +from bug2437.autodoc_dummy_foo import Foo + + +class Bar: + """Dummy class Bar with alias.""" + my_name = Foo diff --git a/tests/roots/test-ext-autodoc/autodoc_dummy_module.py b/tests/roots/test-ext-autodoc/autodoc_dummy_module.py new file mode 100644 index 0000000..c05d96e --- /dev/null +++ b/tests/roots/test-ext-autodoc/autodoc_dummy_module.py @@ -0,0 +1,6 @@ +from dummy import * + + +def test(): + """Dummy function using dummy.*""" + dummy_function() diff --git a/tests/roots/test-ext-autodoc/bug2437/__init__.py b/tests/roots/test-ext-autodoc/bug2437/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-ext-autodoc/bug2437/__init__.py diff --git a/tests/roots/test-ext-autodoc/bug2437/autodoc_dummy_foo.py b/tests/roots/test-ext-autodoc/bug2437/autodoc_dummy_foo.py new file mode 100644 index 0000000..9c954d8 --- /dev/null +++ b/tests/roots/test-ext-autodoc/bug2437/autodoc_dummy_foo.py @@ -0,0 +1,3 @@ +class Foo: + """Dummy class Foo.""" + pass diff --git a/tests/roots/test-ext-autodoc/circular_import/__init__.py b/tests/roots/test-ext-autodoc/circular_import/__init__.py new file mode 100644 index 0000000..402678d --- /dev/null +++ b/tests/roots/test-ext-autodoc/circular_import/__init__.py @@ -0,0 +1 @@ +from circular_import.c import SomeClass diff --git a/tests/roots/test-ext-autodoc/circular_import/a.py b/tests/roots/test-ext-autodoc/circular_import/a.py new file mode 100644 index 0000000..97ad9d8 --- /dev/null +++ b/tests/roots/test-ext-autodoc/circular_import/a.py @@ -0,0 +1 @@ +X = 42 diff --git a/tests/roots/test-ext-autodoc/circular_import/b.py b/tests/roots/test-ext-autodoc/circular_import/b.py new file mode 100644 index 0000000..c9b8ad5 --- /dev/null +++ b/tests/roots/test-ext-autodoc/circular_import/b.py @@ -0,0 +1,4 @@ +import typing + +if typing.TYPE_CHECKING: + from circular_import import SomeClass diff --git a/tests/roots/test-ext-autodoc/circular_import/c.py b/tests/roots/test-ext-autodoc/circular_import/c.py new file mode 100644 index 0000000..0a8829e --- /dev/null +++ b/tests/roots/test-ext-autodoc/circular_import/c.py @@ -0,0 +1,6 @@ +import circular_import.a +import circular_import.b + + +class SomeClass: + X = circular_import.a.X diff --git a/tests/roots/test-ext-autodoc/conf.py b/tests/roots/test-ext-autodoc/conf.py new file mode 100644 index 0000000..979a709 --- /dev/null +++ b/tests/roots/test-ext-autodoc/conf.py @@ -0,0 +1,15 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + +extensions = ['sphinx.ext.autodoc'] + +# The suffix of source filenames. +source_suffix = '.rst' + +autodoc_mock_imports = [ + 'dummy' +] + +nitpicky = True diff --git a/tests/roots/test-ext-autodoc/index.rst b/tests/roots/test-ext-autodoc/index.rst new file mode 100644 index 0000000..eb10829 --- /dev/null +++ b/tests/roots/test-ext-autodoc/index.rst @@ -0,0 +1,15 @@ + +.. automodule:: autodoc_dummy_module + :members: + +.. automodule:: bug2437.autodoc_dummy_foo + :members: + +.. automodule:: autodoc_dummy_bar + :members: + +.. autofunction:: target.typehints.incr + +.. autofunction:: target.overload.sum + +.. 
autofunction:: target.typehints.tuple_args diff --git a/tests/roots/test-ext-autodoc/target/TYPE_CHECKING.py b/tests/roots/test-ext-autodoc/target/TYPE_CHECKING.py new file mode 100644 index 0000000..85aea3a --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/TYPE_CHECKING.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +from gettext import NullTranslations +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Iterable + from io import StringIO + + +class Foo: + attr1: StringIO + + +def spam(ham: Iterable[str]) -> tuple[NullTranslations, bool]: + pass diff --git a/tests/roots/test-ext-autodoc/target/__init__.py b/tests/roots/test-ext-autodoc/target/__init__.py new file mode 100644 index 0000000..d7ee4ac --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/__init__.py @@ -0,0 +1,204 @@ +import enum +from io import StringIO + +from ._functions_to_import import function_to_be_imported + +__all__ = ['Class'] + +#: documentation for the integer +integer = 1 + + +def raises(exc, func, *args, **kwds): + """Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*.""" + pass + + +class CustomEx(Exception): + """My custom exception.""" + + def f(self): + """Exception method.""" + + +def _funky_classmethod(name, b, c, d, docstring=None): + """Generates a classmethod for a class from a template by filling out + some arguments.""" + def template(cls, a, b, c, d=4, e=5, f=6): + return a, b, c, d, e, f + from functools import partial + function = partial(template, b=b, c=c, d=d) + function.__name__ = name + function.__doc__ = docstring + return classmethod(function) + + +class Class: + """Class to document.""" + + def meth(self): + """Function.""" + + def undocmeth(self): + pass + + def skipmeth(self): + """Method that should be skipped.""" + + def excludemeth(self): + """Method that should be excluded.""" + + # should not be documented + skipattr = 'foo' + + #: should be documented -- süß + attr = 'bar' + + docattr = 'baz' + """should likewise be documented -- süß""" + + udocattr = 'quux' + """should be documented as well - süß""" + + # initialized to any class imported from another module + mdocattr = StringIO() + """should be documented as well - süß""" + + roger = _funky_classmethod("roger", 2, 3, 4) + + moore = _funky_classmethod("moore", 9, 8, 7, + docstring="moore(a, e, f) -> happiness") + + def __init__(self, arg): + self.inst_attr_inline = None #: an inline documented instance attr + #: a documented instance attribute + self.inst_attr_comment = None + self.inst_attr_string = None + """a documented instance attribute""" + self._private_inst_attr = None #: a private instance attribute + + def __special1__(self): + """documented special method""" + + def __special2__(self): + # undocumented special method + pass + + +class CustomDict(dict): + """Docstring.""" + + +def function(foo, *args, **kwds): + """ + Return spam. 
+ """ + pass + + +class Outer: + """Foo""" + + class Inner: + """Foo""" + + def meth(self): + """Foo""" + + # should be documented as an alias + factory = dict + + +class InnerChild(Outer.Inner): + """InnerChild docstring""" + + +class DocstringSig: + def __new__(cls, *new_args, **new_kwargs): + """__new__(cls, d, e=1) -> DocstringSig +First line of docstring + + rest of docstring + """ + + def __init__(self, *init_args, **init_kwargs): + """__init__(self, a, b=1) -> None +First line of docstring + + rest of docstring + """ + + def meth(self): + """meth(FOO, BAR=1) -> BAZ +First line of docstring + + rest of docstring + """ + + def meth2(self): + """First line, no signature + Second line followed by indentation:: + + indented line + """ + + @property + def prop1(self): + """DocstringSig.prop1(self) + First line of docstring + """ + return 123 + + @property + def prop2(self): + """First line of docstring + Second line of docstring + """ + return 456 + + +class StrRepr(str): + """docstring""" + + def __repr__(self): + return self + + +class AttCls: + a1 = StrRepr('hello\nworld') + a2 = None + + +class InstAttCls: + """Class with documented class and instance attributes.""" + + #: Doc comment for class attribute InstAttCls.ca1. + #: It can have multiple lines. + ca1 = 'a' + + ca2 = 'b' #: Doc comment for InstAttCls.ca2. One line only. + + ca3 = 'c' + """Docstring for class attribute InstAttCls.ca3.""" + + def __init__(self): + #: Doc comment for instance attribute InstAttCls.ia1 + self.ia1 = 'd' + + self.ia2 = 'e' + """Docstring for instance attribute InstAttCls.ia2.""" + + +class CustomIter: + def __init__(self): + """Create a new `CustomIter`.""" + self.values = range(10) + + def __iter__(self): + """Iterate squares of each value.""" + for i in self.values: + yield i ** 2 + + def snafucate(self): + """Makes this snafucated.""" + print("snafucated") diff --git a/tests/roots/test-ext-autodoc/target/_functions_to_import.py b/tests/roots/test-ext-autodoc/target/_functions_to_import.py new file mode 100644 index 0000000..7663e97 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/_functions_to_import.py @@ -0,0 +1,8 @@ +from typing import TYPE_CHECKING, Optional + +if TYPE_CHECKING: + from sphinx.application import Sphinx + + +def function_to_be_imported(app: Optional["Sphinx"]) -> str: + """docstring""" diff --git a/tests/roots/test-ext-autodoc/target/abstractmethods.py b/tests/roots/test-ext-autodoc/target/abstractmethods.py new file mode 100644 index 0000000..a4396d5 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/abstractmethods.py @@ -0,0 +1,29 @@ +from abc import abstractmethod + + +class Base(): + def meth(self): + pass + + @abstractmethod + def abstractmeth(self): + pass + + @staticmethod + @abstractmethod + def staticmeth(): + pass + + @classmethod + @abstractmethod + def classmeth(cls): + pass + + @property + @abstractmethod + def prop(self): + pass + + @abstractmethod + async def coroutinemeth(self): + pass diff --git a/tests/roots/test-ext-autodoc/target/annotated.py b/tests/roots/test-ext-autodoc/target/annotated.py new file mode 100644 index 0000000..5b87518 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/annotated.py @@ -0,0 +1,8 @@ +from __future__ import annotations + +from typing import Annotated + + +def hello(name: Annotated[str, "attribute"]) -> None: + """docstring""" + pass diff --git a/tests/roots/test-ext-autodoc/target/autoclass_content.py b/tests/roots/test-ext-autodoc/target/autoclass_content.py new file mode 100644 index 0000000..52b9806 --- 
/dev/null +++ b/tests/roots/test-ext-autodoc/target/autoclass_content.py @@ -0,0 +1,47 @@ +class A: + """A class having no __init__, no __new__""" + + +class B: + """A class having __init__(no docstring), no __new__""" + def __init__(self): + pass + + +class C: + """A class having __init__, no __new__""" + def __init__(self): + """__init__ docstring""" + + +class D: + """A class having no __init__, __new__(no docstring)""" + def __new__(cls): + pass + + +class E: + """A class having no __init__, __new__""" + def __new__(cls): + """__new__ docstring""" + + +class F: + """A class having both __init__ and __new__""" + def __init__(self): + """__init__ docstring""" + + def __new__(cls): + """__new__ docstring""" + + +class G(C): + """A class inherits __init__ without docstring.""" + def __init__(self): + pass + + +class H(E): + """A class inherits __new__ without docstring.""" + def __init__(self): + pass diff --git a/tests/roots/test-ext-autodoc/target/autodoc_type_aliases.py b/tests/roots/test-ext-autodoc/target/autodoc_type_aliases.py new file mode 100644 index 0000000..f2c07a0 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/autodoc_type_aliases.py @@ -0,0 +1,49 @@ +from __future__ import annotations + +import io +from typing import Optional, overload + +myint = int + +#: docstring +variable: myint + +#: docstring +variable2 = None # type: myint + +#: docstring +variable3: Optional[myint] + + +def read(r: io.BytesIO) -> io.StringIO: + """docstring""" + + +def sum(x: myint, y: myint) -> myint: + """docstring""" + return x + y + + +@overload +def mult(x: myint, y: myint) -> myint: + ... + + +@overload +def mult(x: float, y: float) -> float: + ... + + +def mult(x, y): + """docstring""" + return x, y + + +class Foo: + """docstring""" + + #: docstring + attr1: myint + + def __init__(self): + self.attr2: myint = None #: docstring diff --git a/tests/roots/test-ext-autodoc/target/bound_method.py b/tests/roots/test-ext-autodoc/target/bound_method.py new file mode 100644 index 0000000..d48b9ee --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/bound_method.py @@ -0,0 +1,7 @@ +class Cls: + def method(self): + """Method docstring""" + pass + + +bound_method = Cls().method diff --git a/tests/roots/test-ext-autodoc/target/cached_property.py b/tests/roots/test-ext-autodoc/target/cached_property.py new file mode 100644 index 0000000..712d1d9 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/cached_property.py @@ -0,0 +1,12 @@ +from functools import cached_property + + +class Foo: + @cached_property + def prop(self) -> int: + return 1 + + @cached_property + def prop_with_type_comment(self): + # type: () -> int + return 1 diff --git a/tests/roots/test-ext-autodoc/target/callable.py b/tests/roots/test-ext-autodoc/target/callable.py new file mode 100644 index 0000000..6fcd505 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/callable.py @@ -0,0 +1,13 @@ +class Callable(): + """A callable object that behaves like a function.""" + + def __call__(self, arg1, arg2, **kwargs): + pass + + def method(self, arg1, arg2): + """docstring of Callable.method().""" + pass + + +function = Callable() +method = function.method diff --git a/tests/roots/test-ext-autodoc/target/canonical/__init__.py b/tests/roots/test-ext-autodoc/target/canonical/__init__.py new file mode 100644 index 0000000..4ca2b33 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/canonical/__init__.py @@ -0,0 +1 @@ +from target.canonical.original import Bar, Foo diff --git 
a/tests/roots/test-ext-autodoc/target/canonical/original.py b/tests/roots/test-ext-autodoc/target/canonical/original.py new file mode 100644 index 0000000..42049b2 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/canonical/original.py @@ -0,0 +1,15 @@ +class Foo: + """docstring""" + + def meth(self): + """docstring""" + + +def bar(): + class Bar: + """docstring""" + + return Bar + + +Bar = bar() diff --git a/tests/roots/test-ext-autodoc/target/classes.py b/tests/roots/test-ext-autodoc/target/classes.py new file mode 100644 index 0000000..e5cce7a --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/classes.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +from inspect import Parameter, Signature +from typing import List, Union + + +class Foo: + pass + + +class Bar: + def __init__(self, x, y): + pass + + +class Baz: + def __new__(cls, x, y): + pass + + +class Qux: + __signature__ = Signature(parameters=[Parameter('foo', Parameter.POSITIONAL_OR_KEYWORD), + Parameter('bar', Parameter.POSITIONAL_OR_KEYWORD)]) + + def __init__(self, x, y): + pass + + +class Quux(List[Union[int, float]]): + """A subclass of List[Union[int, float]]""" + pass + + +class Corge(Quux): + pass + + +Alias = Foo + +#: docstring +OtherAlias = Bar + +#: docstring +IntAlias = int diff --git a/tests/roots/test-ext-autodoc/target/coroutine.py b/tests/roots/test-ext-autodoc/target/coroutine.py new file mode 100644 index 0000000..f977b6e --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/coroutine.py @@ -0,0 +1,37 @@ +import asyncio +from functools import wraps + + +class AsyncClass: + async def do_coroutine(self): + """A documented coroutine function""" + attr_coro_result = await _other_coro_func() + + @classmethod + async def do_coroutine2(cls): + """A documented coroutine classmethod""" + pass + + @staticmethod + async def do_coroutine3(): + """A documented coroutine staticmethod""" + pass + + async def do_asyncgen(self): + """A documented async generator""" + yield + + +async def _other_coro_func(): + return "run" + + +def myawait(f): + @wraps(f) + def wrapper(*args, **kwargs): + awaitable = f(*args, **kwargs) + return asyncio.run(awaitable) + return wrapper + + +sync_func = myawait(_other_coro_func) diff --git a/tests/roots/test-ext-autodoc/target/cython.pyx b/tests/roots/test-ext-autodoc/target/cython.pyx new file mode 100644 index 0000000..5d0329a --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/cython.pyx @@ -0,0 +1,13 @@ +# cython: binding=True +# cython: language_level=3str + +def foo(x: int, *args, y: str, **kwargs): + """Docstring.""" + + +class Class: + """Docstring.""" + + def meth(self, name: str, age: int = 0) -> None: + """Docstring.""" + pass diff --git a/tests/roots/test-ext-autodoc/target/decorator.py b/tests/roots/test-ext-autodoc/target/decorator.py new file mode 100644 index 0000000..faad3ff --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/decorator.py @@ -0,0 +1,53 @@ +from functools import wraps + + +def deco1(func): + """docstring for deco1""" + @wraps(func) + def wrapper(): + return func() + + return wrapper + + +def deco2(condition, message): + """docstring for deco2""" + def decorator(func): + def wrapper(): + return func() + + return wrapper + return decorator + + +@deco1 +def foo(name=None, age=None): + pass + + +class Bar: + @deco1 + def meth(self, name=None, age=None): + pass + + +class Baz: + @deco1 + def __init__(self, name=None, age=None): + pass + + +class Qux: + @deco1 + def __new__(self, name=None, age=None): + pass + + +class _Metaclass(type): + 
@deco1 + def __call__(self, name=None, age=None): + pass + + +class Quux(metaclass=_Metaclass): + pass diff --git a/tests/roots/test-ext-autodoc/target/descriptor.py b/tests/roots/test-ext-autodoc/target/descriptor.py new file mode 100644 index 0000000..2857c99 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/descriptor.py @@ -0,0 +1,31 @@ +class CustomDataDescriptor: + """Descriptor class docstring.""" + + def __init__(self, doc): + self.__doc__ = doc + + def __get__(self, obj, type=None): + if obj is None: + return self + return 42 + + def meth(self): + """Function.""" + return "The Answer" + + +class CustomDataDescriptorMeta(type): + """Descriptor metaclass docstring.""" + + +class CustomDataDescriptor2(CustomDataDescriptor): + """Descriptor class with custom metaclass docstring.""" + __metaclass__ = CustomDataDescriptorMeta + + +class Class: + descr = CustomDataDescriptor("Descriptor instance docstring.") + + @property + def prop(self): + """Property.""" diff --git a/tests/roots/test-ext-autodoc/target/docstring_signature.py b/tests/roots/test-ext-autodoc/target/docstring_signature.py new file mode 100644 index 0000000..981d936 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/docstring_signature.py @@ -0,0 +1,33 @@ +class A: + """A(foo, bar)""" + + +class B: + """B(foo, bar)""" + def __init__(self): + """B(foo, bar, baz)""" + + +class C: + """C(foo, bar)""" + def __new__(cls): + """C(foo, bar, baz)""" + + +class D: + def __init__(self): + """D(foo, bar, baz)""" + + +class E: + def __init__(self): + """E(foo: int, bar: int, baz: int) -> None \\ + E(foo: str, bar: str, baz: str) -> None \\ + E(foo: float, bar: float, baz: float)""" + + +class F: + def __init__(self): + """F(foo: int, bar: int, baz: int) -> None + F(foo: str, bar: str, baz: str) -> None + F(foo: float, bar: float, baz: float)""" diff --git a/tests/roots/test-ext-autodoc/target/empty_all.py b/tests/roots/test-ext-autodoc/target/empty_all.py new file mode 100644 index 0000000..c094cff --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/empty_all.py @@ -0,0 +1,16 @@ +""" +docsting of empty_all module. 
+""" +__all__ = [] + + +def foo(): + """docstring""" + + +def bar(): + """docstring""" + + +def baz(): + """docstring""" diff --git a/tests/roots/test-ext-autodoc/target/enums.py b/tests/roots/test-ext-autodoc/target/enums.py new file mode 100644 index 0000000..c69455f --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/enums.py @@ -0,0 +1,23 @@ +import enum + + +class EnumCls(enum.Enum): + """ + this is enum class + """ + + #: doc for val1 + val1 = 12 + val2 = 23 #: doc for val2 + val3 = 34 + """doc for val3""" + val4 = 34 + + def say_hello(self): + """a method says hello to you.""" + pass + + @classmethod + def say_goodbye(cls): + """a classmethod says good-bye to you.""" + pass diff --git a/tests/roots/test-ext-autodoc/target/final.py b/tests/roots/test-ext-autodoc/target/final.py new file mode 100644 index 0000000..a8c3860 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/final.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +import typing +from typing import final + + +@typing.final +class Class: + """docstring""" + + @final + def meth1(self): + """docstring""" + + def meth2(self): + """docstring""" diff --git a/tests/roots/test-ext-autodoc/target/functions.py b/tests/roots/test-ext-autodoc/target/functions.py new file mode 100644 index 0000000..b62aa70 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/functions.py @@ -0,0 +1,19 @@ +from functools import partial + + +def func(): + pass + + +async def coroutinefunc(): + pass + + +async def asyncgenerator(): + yield + +partial_func = partial(func) +partial_coroutinefunc = partial(coroutinefunc) + +builtin_func = print +partial_builtin_func = partial(print) diff --git a/tests/roots/test-ext-autodoc/target/generic_class.py b/tests/roots/test-ext-autodoc/target/generic_class.py new file mode 100644 index 0000000..1ec8058 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/generic_class.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +from typing import Generic, TypeVar + +T = TypeVar('T') + + +# Test that typing.Generic's __new__ method does not mask our class' +# __init__ signature. 
+class A(Generic[T]): + """docstring for A""" + def __init__(self, a, b=None): + pass diff --git a/tests/roots/test-ext-autodoc/target/genericalias.py b/tests/roots/test-ext-autodoc/target/genericalias.py new file mode 100644 index 0000000..06026fb --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/genericalias.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +from typing import Callable, List + +#: A list of int +T = List[int] + +C = Callable[[int], None] # a generic alias not having a doccomment + + +class Class: + #: A list of int + T = List[int] + +#: A list of Class +L = List[Class] diff --git a/tests/roots/test-ext-autodoc/target/hide_value.py b/tests/roots/test-ext-autodoc/target/hide_value.py new file mode 100644 index 0000000..1d53aab --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/hide_value.py @@ -0,0 +1,19 @@ +#: docstring +#: +#: :meta hide-value: +SENTINEL1 = object() + +#: :meta hide-value: +SENTINEL2 = object() + + +class Foo: + """docstring""" + + #: docstring + #: + #: :meta hide-value: + SENTINEL1 = object() + + #: :meta hide-value: + SENTINEL2 = object() diff --git a/tests/roots/test-ext-autodoc/target/imported_members.py b/tests/roots/test-ext-autodoc/target/imported_members.py new file mode 100644 index 0000000..ee6e5b3 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/imported_members.py @@ -0,0 +1 @@ +from .partialfunction import func2, func3 diff --git a/tests/roots/test-ext-autodoc/target/inheritance.py b/tests/roots/test-ext-autodoc/target/inheritance.py new file mode 100644 index 0000000..e06f7a8 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/inheritance.py @@ -0,0 +1,25 @@ +class Base: + #: docstring + inheritedattr = None + + def inheritedmeth(self): + """Inherited function.""" + + @classmethod + def inheritedclassmeth(cls): + """Inherited class method.""" + + @staticmethod + def inheritedstaticmeth(cls): + """Inherited static method.""" + + +class Derived(Base): + def inheritedmeth(self): + # no docstring here + pass + + +class MyList(list): + def meth(self): + """docstring""" diff --git a/tests/roots/test-ext-autodoc/target/instance_variable.py b/tests/roots/test-ext-autodoc/target/instance_variable.py new file mode 100644 index 0000000..1d393bc --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/instance_variable.py @@ -0,0 +1,11 @@ +class Foo: + def __init__(self): + self.attr1 = None #: docstring foo + self.attr2 = None #: docstring foo + + +class Bar(Foo): + def __init__(self): + self.attr2 = None #: docstring bar + self.attr3 = None #: docstring bar + self.attr4 = None diff --git a/tests/roots/test-ext-autodoc/target/literal.py b/tests/roots/test-ext-autodoc/target/literal.py new file mode 100644 index 0000000..4340e51 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/literal.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from enum import Enum +from typing import Literal, TypeVar + + +class MyEnum(Enum): + a = 1 + + +T = TypeVar('T', bound=Literal[1234]) +"""docstring""" + + +U = TypeVar('U', bound=Literal[MyEnum.a]) +"""docstring""" + + +def bar(x: Literal[1234]): + """docstring""" + + +def foo(x: Literal[MyEnum.a]): + """docstring""" diff --git a/tests/roots/test-ext-autodoc/target/metadata.py b/tests/roots/test-ext-autodoc/target/metadata.py new file mode 100644 index 0000000..7a4488f --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/metadata.py @@ -0,0 +1,2 @@ +def foo(): + """:meta metadata-only-docstring:""" diff --git a/tests/roots/test-ext-autodoc/target/methods.py 
b/tests/roots/test-ext-autodoc/target/methods.py new file mode 100644 index 0000000..ad5a6a9 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/methods.py @@ -0,0 +1,29 @@ +from functools import partialmethod + + +class Base(): + def meth(self): + pass + + @staticmethod + def staticmeth(): + pass + + @classmethod + def classmeth(cls): + pass + + @property + def prop(self): + pass + + partialmeth = partialmethod(meth) + + async def coroutinemeth(self): + pass + + partial_coroutinemeth = partialmethod(coroutinemeth) + + +class Inherited(Base): + pass diff --git a/tests/roots/test-ext-autodoc/target/module.py b/tests/roots/test-ext-autodoc/target/module.py new file mode 100644 index 0000000..fe3b490 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/module.py @@ -0,0 +1,14 @@ +undocumented = 1 + +#: docstring +documented = 1 + +undoc_annotated: int + +#: docstring +annotated: int + +__special__ = 1 + +#: docstring +__documented_special__ = 1 diff --git a/tests/roots/test-ext-autodoc/target/name_conflict/__init__.py b/tests/roots/test-ext-autodoc/target/name_conflict/__init__.py new file mode 100644 index 0000000..0a6f496 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/name_conflict/__init__.py @@ -0,0 +1,6 @@ +from .foo import bar + + +class foo: + """docstring of target.name_conflict::foo.""" + pass diff --git a/tests/roots/test-ext-autodoc/target/name_conflict/foo.py b/tests/roots/test-ext-autodoc/target/name_conflict/foo.py new file mode 100644 index 0000000..bb83ca0 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/name_conflict/foo.py @@ -0,0 +1,2 @@ +class bar: + """docstring of target.name_conflict.foo::bar.""" diff --git a/tests/roots/test-ext-autodoc/target/name_mangling.py b/tests/roots/test-ext-autodoc/target/name_mangling.py new file mode 100644 index 0000000..269b51d --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/name_mangling.py @@ -0,0 +1,11 @@ +class Foo: + #: name of Foo + __name = None + __age = None + + +class Bar(Foo): + __address = None + + #: a member having mangled-like name + _Baz__email = None diff --git a/tests/roots/test-ext-autodoc/target/need_mocks.py b/tests/roots/test-ext-autodoc/target/need_mocks.py new file mode 100644 index 0000000..881220b --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/need_mocks.py @@ -0,0 +1,42 @@ +import missing_module +import missing_package1.missing_module1 +from missing_module import missing_name +from missing_package2 import missing_module2 +from missing_package3.missing_module3 import missing_name + +import sphinx.missing_module4 +from sphinx.missing_module4 import missing_name2 + + +@missing_name(int) +def decoratedFunction(): + """decoratedFunction docstring""" + return None + + +def func(arg: missing_module.Class): + """a function takes mocked object as an argument""" + pass + + +class TestAutodoc: + """TestAutodoc docstring.""" + + #: docstring + Alias = missing_module2.Class + + @missing_name + def decoratedMethod(self): + """TestAutodoc::decoratedMethod docstring""" + return None + + +class Inherited(missing_module.Class): + """docstring""" + pass + + +sphinx.missing_module4.missing_function(len(missing_name2)) + +#: docstring +Alias = missing_module2.Class diff --git a/tests/roots/test-ext-autodoc/target/overload.py b/tests/roots/test-ext-autodoc/target/overload.py new file mode 100644 index 0000000..4bcb6ea --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/overload.py @@ -0,0 +1,90 @@ +from __future__ import annotations + +from typing import Any, overload + + +@overload +def 
sum(x: int, y: int = 0) -> int: + ... + + +@overload +def sum(x: float, y: float = 0.0) -> float: + ... + + +@overload +def sum(x: str, y: str = ...) -> str: + ... + + +def sum(x, y=None): + """docstring""" + return x + y + + +class Math: + """docstring""" + + @overload + def sum(self, x: int, y: int = 0) -> int: + ... + + @overload + def sum(self, x: float, y: float = 0.0) -> float: + ... + + @overload + def sum(self, x: str, y: str = ...) -> str: + ... + + def sum(self, x, y=None): + """docstring""" + return x + y + + +class Foo: + """docstring""" + + @overload + def __new__(cls, x: int, y: int) -> Foo: + ... + + @overload + def __new__(cls, x: str, y: str) -> Foo: + ... + + def __new__(cls, x, y): + pass + + +class Bar: + """docstring""" + + @overload + def __init__(cls, x: int, y: int) -> None: + ... + + @overload + def __init__(cls, x: str, y: str) -> None: + ... + + def __init__(cls, x, y): + pass + + +class Meta(type): + @overload + def __call__(cls, x: int, y: int) -> Any: + ... + + @overload + def __call__(cls, x: str, y: str) -> Any: + ... + + def __call__(cls, x, y): + pass + + +class Baz(metaclass=Meta): + """docstring""" diff --git a/tests/roots/test-ext-autodoc/target/overload2.py b/tests/roots/test-ext-autodoc/target/overload2.py new file mode 100644 index 0000000..e901f79 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/overload2.py @@ -0,0 +1,5 @@ +from target.overload import Bar + + +class Baz(Bar): + pass diff --git a/tests/roots/test-ext-autodoc/target/partialfunction.py b/tests/roots/test-ext-autodoc/target/partialfunction.py new file mode 100644 index 0000000..3be63ee --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/partialfunction.py @@ -0,0 +1,12 @@ +from functools import partial + + +def func1(a, b, c): + """docstring of func1""" + pass + + +func2 = partial(func1, 1) +func3 = partial(func2, 2) +func3.__doc__ = "docstring of func3" +func4 = partial(func3, 3) diff --git a/tests/roots/test-ext-autodoc/target/partialmethod.py b/tests/roots/test-ext-autodoc/target/partialmethod.py new file mode 100644 index 0000000..20d75e9 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/partialmethod.py @@ -0,0 +1,17 @@ +from functools import partialmethod + + +class Cell: + """An example for partialmethod. + + refs: https://docs.python.jp/3/library/functools.html#functools.partialmethod + """ + + def set_state(self, state): + """Update state of cell to *state*.""" + + #: Make a cell alive. 
+ set_alive = partialmethod(set_state, True) + + # a partialmethod with no docstring + set_dead = partialmethod(set_state, False) diff --git a/tests/roots/test-ext-autodoc/target/pep570.py b/tests/roots/test-ext-autodoc/target/pep570.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/pep570.py diff --git a/tests/roots/test-ext-autodoc/target/pep604.py b/tests/roots/test-ext-autodoc/target/pep604.py new file mode 100644 index 0000000..9b1f94a --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/pep604.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +attr: int | str #: docstring + + +def sum(x: int | str, y: int | str) -> int | str: + """docstring""" + + +class Foo: + """docstring""" + + attr: int | str #: docstring + + def meth(self, x: int | str, y: int | str) -> int | str: + """docstring""" diff --git a/tests/roots/test-ext-autodoc/target/preserve_defaults.py b/tests/roots/test-ext-autodoc/target/preserve_defaults.py new file mode 100644 index 0000000..86e1038 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/preserve_defaults.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +from datetime import datetime +from typing import Any + +CONSTANT = 'foo' +SENTINEL = object() + + +def foo(name: str = CONSTANT, + sentinel: Any = SENTINEL, + now: datetime = datetime.now(), + color: int = 0xFFFFFF, + *, + kwarg1, + kwarg2 = 0xFFFFFF) -> None: + """docstring""" + + +class Class: + """docstring""" + + def meth(self, name: str = CONSTANT, sentinel: Any = SENTINEL, + now: datetime = datetime.now(), color: int = 0xFFFFFF, + *, kwarg1, kwarg2 = 0xFFFFFF) -> None: + """docstring""" + + @classmethod + def clsmeth(cls, name: str = CONSTANT, sentinel: Any = SENTINEL, + now: datetime = datetime.now(), color: int = 0xFFFFFF, + *, kwarg1, kwarg2 = 0xFFFFFF) -> None: + """docstring""" + + +get_sentinel = lambda custom=SENTINEL: custom +"""docstring""" + + +class MultiLine: + """docstring""" + + # The properties will raise a silent SyntaxError because "lambda self: 1" + # will be detected as a function to update the default values of. However, + # only prop3 will not fail because it's on a single line whereas the others + # will fail to parse. 
+ + prop1 = property( + lambda self: 1, doc="docstring") + + prop2 = property( + lambda self: 2, doc="docstring" + ) + + prop3 = property(lambda self: 3, doc="docstring") + + prop4 = (property + (lambda self: 4, doc="docstring")) + + prop5 = property\ + (lambda self: 5, doc="docstring") diff --git a/tests/roots/test-ext-autodoc/target/preserve_defaults_special_constructs.py b/tests/roots/test-ext-autodoc/target/preserve_defaults_special_constructs.py new file mode 100644 index 0000000..0fdb11a --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/preserve_defaults_special_constructs.py @@ -0,0 +1,50 @@ +from __future__ import annotations + +from collections import namedtuple +from dataclasses import dataclass, field +from typing import NamedTuple, TypedDict + +#: docstring +SENTINEL = object() + + +#: docstring +ze_lambda = lambda z=SENTINEL: None + + +def foo(x, y, z=SENTINEL): + """docstring""" + + +@dataclass +class DataClass: + """docstring""" + a: int + b: object = SENTINEL + c: list[int] = field(default_factory=lambda: [1, 2, 3]) + + +@dataclass(init=False) +class DataClassNoInit: + """docstring""" + a: int + b: object = SENTINEL + c: list[int] = field(default_factory=lambda: [1, 2, 3]) + + +class MyTypedDict(TypedDict): + """docstring""" + a: int + b: object + c: list[int] + + +class MyNamedTuple1(NamedTuple): + """docstring""" + a: int + b: object = object() + c: list[int] = [1, 2, 3] + + +class MyNamedTuple2(namedtuple('Base', ('a', 'b'), defaults=(0, SENTINEL))): + """docstring""" diff --git a/tests/roots/test-ext-autodoc/target/private.py b/tests/roots/test-ext-autodoc/target/private.py new file mode 100644 index 0000000..e463448 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/private.py @@ -0,0 +1,27 @@ +def private_function(name): + """private_function is a docstring(). + + :meta private: + """ + +def _public_function(name): + """public_function is a docstring(). + + :meta public: + """ + + +PRIVATE_CONSTANT = None #: :meta private: +_PUBLIC_CONSTANT = None #: :meta public: + + +class Foo: + #: A public class attribute whose name starts with an underscore. + #: + #: :meta public: + _public_attribute = 47 + + #: A private class attribute whose name does not start with an underscore. 
+ #: + #: :meta private: + private_attribute = 11 diff --git a/tests/roots/test-ext-autodoc/target/process_docstring.py b/tests/roots/test-ext-autodoc/target/process_docstring.py new file mode 100644 index 0000000..6005943 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/process_docstring.py @@ -0,0 +1,8 @@ +def func(): + """ + first line + --- + second line + --- + third line + """ diff --git a/tests/roots/test-ext-autodoc/target/properties.py b/tests/roots/test-ext-autodoc/target/properties.py new file mode 100644 index 0000000..018f51e --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/properties.py @@ -0,0 +1,22 @@ +class Foo: + """docstring""" + + @property + def prop1(self) -> int: + """docstring""" + + @classmethod + @property + def prop2(self) -> int: + """docstring""" + + @property + def prop1_with_type_comment(self): + # type: () -> int + """docstring""" + + @classmethod + @property + def prop2_with_type_comment(self): + # type: () -> int + """docstring""" diff --git a/tests/roots/test-ext-autodoc/target/singledispatch.py b/tests/roots/test-ext-autodoc/target/singledispatch.py new file mode 100644 index 0000000..3dd5aaf --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/singledispatch.py @@ -0,0 +1,36 @@ +import inspect +from functools import singledispatch + + +def assign_signature(func): + # This is intended to cover more complex signature-rewriting decorators. + func.__signature__ = inspect.signature(func) + return func + + +@singledispatch +def func(arg, kwarg=None): + """A function for general use.""" + pass + + +@func.register(int) +@func.register(float) +def _func_int(arg, kwarg=None): + """A function for int.""" + pass + + +@func.register(str) +@assign_signature +def _func_str(arg, kwarg=None): + """A function for str.""" + pass + + +@func.register +def _func_dict(arg: dict, kwarg=None): + """A function for dict.""" + # This function tests for specifying type through annotations + pass + diff --git a/tests/roots/test-ext-autodoc/target/singledispatchmethod.py b/tests/roots/test-ext-autodoc/target/singledispatchmethod.py new file mode 100644 index 0000000..fb92293 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/singledispatchmethod.py @@ -0,0 +1,27 @@ +from functools import singledispatchmethod + + +class Foo: + """docstring""" + + @singledispatchmethod + def meth(self, arg, kwarg=None): + """A method for general use.""" + pass + + @meth.register(int) + @meth.register(float) + def _meth_int(self, arg, kwarg=None): + """A method for int.""" + pass + + @meth.register(str) + def _meth_str(self, arg, kwarg=None): + """A method for str.""" + pass + + @meth.register + def _meth_dict(self, arg: dict, kwarg=None): + """A method for dict.""" + # This function tests for specifying type through annotations + pass diff --git a/tests/roots/test-ext-autodoc/target/slots.py b/tests/roots/test-ext-autodoc/target/slots.py new file mode 100644 index 0000000..75c7a4a --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/slots.py @@ -0,0 +1,22 @@ +class Foo: + """docstring""" + + __slots__ = ['attr'] + + +class Bar: + """docstring""" + + __slots__ = {'attr1': 'docstring of attr1', + 'attr2': 'docstring of attr2', + 'attr3': None} + __annotations__ = {'attr1': int} + + def __init__(self): + self.attr2 = None #: docstring of instance attr2 + + +class Baz: + """docstring""" + + __slots__ = 'attr' diff --git a/tests/roots/test-ext-autodoc/target/sort_by_all.py b/tests/roots/test-ext-autodoc/target/sort_by_all.py new file mode 100644 index 0000000..03def47 --- /dev/null 
+++ b/tests/roots/test-ext-autodoc/target/sort_by_all.py @@ -0,0 +1,25 @@ +__all__ = ['baz', 'foo', 'Bar'] + + +def foo(): + pass + + +class Bar: + pass + + +def baz(): + pass + + +def qux(): + pass + + +class Quux: + pass + + +def foobar(): + pass diff --git a/tests/roots/test-ext-autodoc/target/typed_vars.py b/tests/roots/test-ext-autodoc/target/typed_vars.py new file mode 100644 index 0000000..0fe7468 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/typed_vars.py @@ -0,0 +1,34 @@ +#: attr1 +attr1: str = '' +#: attr2 +attr2: str +#: attr3 +attr3 = '' # type: str + + +class _Descriptor: + def __init__(self, name): + self.__doc__ = f"This is {name}" + def __get__(self): + pass + + +class Class: + attr1: int = 0 + attr2: int + attr3 = 0 # type: int + + descr4: int = _Descriptor("descr4") + + def __init__(self): + self.attr4: int = 0 #: attr4 + self.attr5: int #: attr5 + self.attr6 = 0 # type: int + """attr6""" + + +class Derived(Class): + attr7: int + + +Alias = Derived diff --git a/tests/roots/test-ext-autodoc/target/typehints.py b/tests/roots/test-ext-autodoc/target/typehints.py new file mode 100644 index 0000000..9071594 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/typehints.py @@ -0,0 +1,105 @@ +from __future__ import annotations + +import pathlib +from typing import Any, Tuple, TypeVar, Union + +CONST1: int +#: docstring +CONST2: int = 1 +#: docstring +CONST3: pathlib.PurePosixPath = pathlib.PurePosixPath("/a/b/c") +#: docstring +T = TypeVar("T", bound=pathlib.PurePosixPath) + + +def incr(a: int, b: int = 1) -> int: + return a + b + + +def decr(a, b = 1): + # type: (int, int) -> int + return a - b + + +class Math: + CONST1: int + CONST2: int = 1 + CONST3: pathlib.PurePosixPath = pathlib.PurePosixPath("/a/b/c") + + def __init__(self, s: str, o: Any = None) -> None: + pass + + def incr(self, a: int, b: int = 1) -> int: + return a + b + + def decr(self, a, b = 1): + # type: (int, int) -> int + return a - b + + def nothing(self): + # type: () -> None + pass + + def horse(self, + a, # type: str + b, # type: int + ): + # type: (...) -> None + return + + @property + def prop(self) -> int: + return 0 + + @property + def path(self) -> pathlib.PurePosixPath: + return pathlib.PurePosixPath("/a/b/c") + + +def tuple_args(x: tuple[int, int | str]) -> tuple[int, int]: + pass + + +class NewAnnotation: + def __new__(cls, i: int) -> NewAnnotation: + pass + + +class NewComment: + def __new__(cls, i): + # type: (int) -> NewComment + pass + + +class _MetaclassWithCall(type): + def __call__(cls, a: int): + pass + + +class SignatureFromMetaclass(metaclass=_MetaclassWithCall): + pass + + +def complex_func(arg1, arg2, arg3=None, *args, **kwargs): + # type: (str, List[int], Tuple[int, Union[str, Unknown]], *str, **str) -> None + pass + + +def missing_attr(c, + a, # type: str + b=None # type: Optional[str] + ): + # type: (...) -> str + return a + (b or "") + + +class _ClassWithDocumentedInit: + """Class docstring.""" + + def __init__(self, x: int, *args: int, **kwargs: int) -> None: + """Init docstring. 
+ + :param x: Some integer + :param args: Some integer + :param kwargs: Some integer + """ diff --git a/tests/roots/test-ext-autodoc/target/typevar.py b/tests/roots/test-ext-autodoc/target/typevar.py new file mode 100644 index 0000000..1a02f3e --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/typevar.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from datetime import date +from typing import NewType, TypeVar + +#: T1 +T1 = TypeVar("T1") + +T2 = TypeVar("T2") # A TypeVar not having doc comment + +#: T3 +T3 = TypeVar("T3", int, str) + +#: T4 +T4 = TypeVar("T4", covariant=True) + +#: T5 +T5 = TypeVar("T5", contravariant=True) + +#: T6 +T6 = NewType("T6", date) + +#: T7 +T7 = TypeVar("T7", bound=int) + + +class Class: + #: T1 + T1 = TypeVar("T1") + + #: T6 + T6 = NewType("T6", date) diff --git a/tests/roots/test-ext-autodoc/target/uninitialized_attributes.py b/tests/roots/test-ext-autodoc/target/uninitialized_attributes.py new file mode 100644 index 0000000..e0f229c --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/uninitialized_attributes.py @@ -0,0 +1,8 @@ +class Base: + attr1: int #: docstring + attr2: str + + +class Derived(Base): + attr3: int #: docstring + attr4: str diff --git a/tests/roots/test-ext-autodoc/target/wrappedfunction.py b/tests/roots/test-ext-autodoc/target/wrappedfunction.py new file mode 100644 index 0000000..064d777 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/wrappedfunction.py @@ -0,0 +1,17 @@ +from __future__ import annotations + +from contextlib import contextmanager +from functools import lru_cache +from typing import Generator + + +@lru_cache(maxsize=None) +def slow_function(message, timeout): + """This function is slow.""" + print(message) + + +@contextmanager +def feeling_good(x: int, y: int) -> Generator: + """You'll feel better in this context!""" + yield diff --git a/tests/roots/test-ext-autosectionlabel-prefix-document/conf.py b/tests/roots/test-ext-autosectionlabel-prefix-document/conf.py new file mode 100644 index 0000000..78fb56c --- /dev/null +++ b/tests/roots/test-ext-autosectionlabel-prefix-document/conf.py @@ -0,0 +1,2 @@ +extensions = ['sphinx.ext.autosectionlabel'] +autosectionlabel_prefix_document = True diff --git a/tests/roots/test-ext-autosectionlabel-prefix-document/index.rst b/tests/roots/test-ext-autosectionlabel-prefix-document/index.rst new file mode 100644 index 0000000..d767373 --- /dev/null +++ b/tests/roots/test-ext-autosectionlabel-prefix-document/index.rst @@ -0,0 +1,37 @@ +========================================= +test-ext-autosectionlabel-prefix-document +========================================= + + +Introduce of Sphinx +=================== + +Installation +============ + +For Windows users +----------------- + +For UNIX users +-------------- + +Linux +^^^^^ + +FreeBSD +^^^^^^^ + +This one's got an apostrophe +---------------------------- + + +References +========== + +* :ref:`index:Introduce of Sphinx` +* :ref:`index:Installation` +* :ref:`index:For Windows users` +* :ref:`index:For UNIX users` +* :ref:`index:Linux` +* :ref:`index:FreeBSD` +* :ref:`index:This one's got an apostrophe` diff --git a/tests/roots/test-ext-autosectionlabel/conf.py b/tests/roots/test-ext-autosectionlabel/conf.py new file mode 100644 index 0000000..31e93ff --- /dev/null +++ b/tests/roots/test-ext-autosectionlabel/conf.py @@ -0,0 +1 @@ +extensions = ['sphinx.ext.autosectionlabel'] diff --git a/tests/roots/test-ext-autosectionlabel/index.rst b/tests/roots/test-ext-autosectionlabel/index.rst new file mode 100644 index 
0000000..133206e --- /dev/null +++ b/tests/roots/test-ext-autosectionlabel/index.rst @@ -0,0 +1,37 @@ +========================= +test-ext-autosectionlabel +========================= + + +Introduce of Sphinx +=================== + +Installation +============ + +For Windows users +----------------- + +For UNIX users +-------------- + +Linux +^^^^^ + +FreeBSD +^^^^^^^ + +This one's got an apostrophe +---------------------------- + +References +========== + +* :ref:`test-ext-autosectionlabel` +* :ref:`Introduce of Sphinx` +* :ref:`Installation` +* :ref:`For Windows users` +* :ref:`For UNIX users` +* :ref:`Linux` +* :ref:`FreeBSD` +* :ref:`This one's got an apostrophe` diff --git a/tests/roots/test-ext-autosummary-filename-map/autosummary_dummy_module.py b/tests/roots/test-ext-autosummary-filename-map/autosummary_dummy_module.py new file mode 100644 index 0000000..b88e335 --- /dev/null +++ b/tests/roots/test-ext-autosummary-filename-map/autosummary_dummy_module.py @@ -0,0 +1,23 @@ +from __future__ import annotations + +from os import path +from typing import Union + + +class Foo: + class Bar: + pass + + def __init__(self): + pass + + def bar(self): + pass + + @property + def baz(self): + pass + + +def bar(x: int | str, y: int = 1) -> None: + pass diff --git a/tests/roots/test-ext-autosummary-filename-map/conf.py b/tests/roots/test-ext-autosummary-filename-map/conf.py new file mode 100644 index 0000000..17e2fa4 --- /dev/null +++ b/tests/roots/test-ext-autosummary-filename-map/conf.py @@ -0,0 +1,11 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + +extensions = ['sphinx.ext.autosummary'] +autosummary_generate = True +autosummary_filename_map = { + "autosummary_dummy_module": "module_mangled", + "autosummary_dummy_module.bar": "bar" +} diff --git a/tests/roots/test-ext-autosummary-filename-map/index.rst b/tests/roots/test-ext-autosummary-filename-map/index.rst new file mode 100644 index 0000000..57d902b --- /dev/null +++ b/tests/roots/test-ext-autosummary-filename-map/index.rst @@ -0,0 +1,9 @@ + +.. 
autosummary:: + :toctree: generated + :caption: An autosummary + + autosummary_dummy_module + autosummary_dummy_module.Foo + autosummary_dummy_module.Foo.bar + autosummary_dummy_module.bar diff --git a/tests/roots/test-ext-autosummary-imported_members/autosummary_dummy_package/__init__.py b/tests/roots/test-ext-autosummary-imported_members/autosummary_dummy_package/__init__.py new file mode 100644 index 0000000..0a7d9f3 --- /dev/null +++ b/tests/roots/test-ext-autosummary-imported_members/autosummary_dummy_package/__init__.py @@ -0,0 +1 @@ +from .autosummary_dummy_module import Bar, foo diff --git a/tests/roots/test-ext-autosummary-imported_members/autosummary_dummy_package/autosummary_dummy_module.py b/tests/roots/test-ext-autosummary-imported_members/autosummary_dummy_package/autosummary_dummy_module.py new file mode 100644 index 0000000..9c93f06 --- /dev/null +++ b/tests/roots/test-ext-autosummary-imported_members/autosummary_dummy_package/autosummary_dummy_module.py @@ -0,0 +1,8 @@ +class Bar: + """Bar class""" + pass + + +def foo(): + """Foo function""" + pass diff --git a/tests/roots/test-ext-autosummary-imported_members/conf.py b/tests/roots/test-ext-autosummary-imported_members/conf.py new file mode 100644 index 0000000..77af668 --- /dev/null +++ b/tests/roots/test-ext-autosummary-imported_members/conf.py @@ -0,0 +1,8 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + +extensions = ['sphinx.ext.autosummary'] +autosummary_generate = True +autosummary_imported_members = True diff --git a/tests/roots/test-ext-autosummary-imported_members/index.rst b/tests/roots/test-ext-autosummary-imported_members/index.rst new file mode 100644 index 0000000..1c55126 --- /dev/null +++ b/tests/roots/test-ext-autosummary-imported_members/index.rst @@ -0,0 +1,7 @@ +test-ext-autosummary-imported_members +===================================== + +.. autosummary:: + :toctree: generated + + autosummary_dummy_package diff --git a/tests/roots/test-ext-autosummary-mock_imports/conf.py b/tests/roots/test-ext-autosummary-mock_imports/conf.py new file mode 100644 index 0000000..121f814 --- /dev/null +++ b/tests/roots/test-ext-autosummary-mock_imports/conf.py @@ -0,0 +1,8 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + +extensions = ['sphinx.ext.autosummary'] +autosummary_generate = True +autosummary_mock_imports = ['unknown'] diff --git a/tests/roots/test-ext-autosummary-mock_imports/foo.py b/tests/roots/test-ext-autosummary-mock_imports/foo.py new file mode 100644 index 0000000..ab4460e --- /dev/null +++ b/tests/roots/test-ext-autosummary-mock_imports/foo.py @@ -0,0 +1,6 @@ +import unknown + + +class Foo(unknown.Class): + """Foo class""" + pass diff --git a/tests/roots/test-ext-autosummary-mock_imports/index.rst b/tests/roots/test-ext-autosummary-mock_imports/index.rst new file mode 100644 index 0000000..f6044ed --- /dev/null +++ b/tests/roots/test-ext-autosummary-mock_imports/index.rst @@ -0,0 +1,7 @@ +test-ext-autosummary-mock_imports +================================= + +.. 
autosummary:: + :toctree: generated + + foo diff --git a/tests/roots/test-ext-autosummary-module_all/autosummary_dummy_package_all/__init__.py b/tests/roots/test-ext-autosummary-module_all/autosummary_dummy_package_all/__init__.py new file mode 100644 index 0000000..82f2060 --- /dev/null +++ b/tests/roots/test-ext-autosummary-module_all/autosummary_dummy_package_all/__init__.py @@ -0,0 +1,13 @@ +from .autosummary_dummy_module import Bar, PublicBar, foo, public_foo + + +def baz(): + """Baz function""" + pass + + +def public_baz(): + """Public Baz function""" + + +__all__ = ["PublicBar", "public_foo", "public_baz", "extra_dummy_module"] diff --git a/tests/roots/test-ext-autosummary-module_all/autosummary_dummy_package_all/autosummary_dummy_module.py b/tests/roots/test-ext-autosummary-module_all/autosummary_dummy_package_all/autosummary_dummy_module.py new file mode 100644 index 0000000..ef89e22 --- /dev/null +++ b/tests/roots/test-ext-autosummary-module_all/autosummary_dummy_package_all/autosummary_dummy_module.py @@ -0,0 +1,20 @@ +class Bar: + """Bar class""" + + pass + + +class PublicBar: + """Public Bar class""" + + pass + + +def foo(): + """Foo function""" + pass + + +def public_foo(): + """Public Foo function""" + pass diff --git a/tests/roots/test-ext-autosummary-module_all/autosummary_dummy_package_all/extra_dummy_module.py b/tests/roots/test-ext-autosummary-module_all/autosummary_dummy_package_all/extra_dummy_module.py new file mode 100644 index 0000000..ef89e22 --- /dev/null +++ b/tests/roots/test-ext-autosummary-module_all/autosummary_dummy_package_all/extra_dummy_module.py @@ -0,0 +1,20 @@ +class Bar: + """Bar class""" + + pass + + +class PublicBar: + """Public Bar class""" + + pass + + +def foo(): + """Foo function""" + pass + + +def public_foo(): + """Public Foo function""" + pass diff --git a/tests/roots/test-ext-autosummary-module_all/conf.py b/tests/roots/test-ext-autosummary-module_all/conf.py new file mode 100644 index 0000000..c6ff534 --- /dev/null +++ b/tests/roots/test-ext-autosummary-module_all/conf.py @@ -0,0 +1,8 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + +extensions = ['sphinx.ext.autosummary'] +autosummary_generate = True +autosummary_ignore_module_all = False diff --git a/tests/roots/test-ext-autosummary-module_all/index.rst b/tests/roots/test-ext-autosummary-module_all/index.rst new file mode 100644 index 0000000..cd638ad --- /dev/null +++ b/tests/roots/test-ext-autosummary-module_all/index.rst @@ -0,0 +1,8 @@ +test-ext-autosummary-module_all +=============================== + +.. autosummary:: + :toctree: generated + :recursive: + + autosummary_dummy_package_all diff --git a/tests/roots/test-ext-autosummary-recursive/conf.py b/tests/roots/test-ext-autosummary-recursive/conf.py new file mode 100644 index 0000000..1c0d022 --- /dev/null +++ b/tests/roots/test-ext-autosummary-recursive/conf.py @@ -0,0 +1,7 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + +extensions = ['sphinx.ext.autosummary'] +autosummary_generate = True diff --git a/tests/roots/test-ext-autosummary-recursive/index.rst b/tests/roots/test-ext-autosummary-recursive/index.rst new file mode 100644 index 0000000..5855bfa --- /dev/null +++ b/tests/roots/test-ext-autosummary-recursive/index.rst @@ -0,0 +1,15 @@ +API Reference +============= + +.. rubric:: Packages + +.. autosummary:: + :toctree: generated + :recursive: + + package + +.. 
autosummary:: + :toctree: generated + + package2 diff --git a/tests/roots/test-ext-autosummary-recursive/package/__init__.py b/tests/roots/test-ext-autosummary-recursive/package/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-ext-autosummary-recursive/package/__init__.py diff --git a/tests/roots/test-ext-autosummary-recursive/package/module.py b/tests/roots/test-ext-autosummary-recursive/package/module.py new file mode 100644 index 0000000..c76e733 --- /dev/null +++ b/tests/roots/test-ext-autosummary-recursive/package/module.py @@ -0,0 +1,13 @@ +from os import * + + +class Foo: + def __init__(self): + pass + + def bar(self): + pass + + @property + def baz(self): + pass diff --git a/tests/roots/test-ext-autosummary-recursive/package/module_importfail.py b/tests/roots/test-ext-autosummary-recursive/package/module_importfail.py new file mode 100644 index 0000000..5c6ce56 --- /dev/null +++ b/tests/roots/test-ext-autosummary-recursive/package/module_importfail.py @@ -0,0 +1,2 @@ +# Fail module import in a catastrophic way +raise SystemExit(1) diff --git a/tests/roots/test-ext-autosummary-recursive/package/package/__init__.py b/tests/roots/test-ext-autosummary-recursive/package/package/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-ext-autosummary-recursive/package/package/__init__.py diff --git a/tests/roots/test-ext-autosummary-recursive/package/package/module.py b/tests/roots/test-ext-autosummary-recursive/package/package/module.py new file mode 100644 index 0000000..c76e733 --- /dev/null +++ b/tests/roots/test-ext-autosummary-recursive/package/package/module.py @@ -0,0 +1,13 @@ +from os import * + + +class Foo: + def __init__(self): + pass + + def bar(self): + pass + + @property + def baz(self): + pass diff --git a/tests/roots/test-ext-autosummary-recursive/package2/__init__.py b/tests/roots/test-ext-autosummary-recursive/package2/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-ext-autosummary-recursive/package2/__init__.py diff --git a/tests/roots/test-ext-autosummary-recursive/package2/module.py b/tests/roots/test-ext-autosummary-recursive/package2/module.py new file mode 100644 index 0000000..c76e733 --- /dev/null +++ b/tests/roots/test-ext-autosummary-recursive/package2/module.py @@ -0,0 +1,13 @@ +from os import * + + +class Foo: + def __init__(self): + pass + + def bar(self): + pass + + @property + def baz(self): + pass diff --git a/tests/roots/test-ext-autosummary-skip-member/conf.py b/tests/roots/test-ext-autosummary-skip-member/conf.py new file mode 100644 index 0000000..7c8f0e9 --- /dev/null +++ b/tests/roots/test-ext-autosummary-skip-member/conf.py @@ -0,0 +1,20 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + + +extensions = ['sphinx.ext.autosummary'] +autosummary_generate = True +autodoc_default_options = {'members': True} + + +def skip_member(app, what, name, obj, skip, options): + if name == 'skipmeth': + return True + elif name == '_privatemeth': + return False + + +def setup(app): + app.connect('autodoc-skip-member', skip_member) diff --git a/tests/roots/test-ext-autosummary-skip-member/index.rst b/tests/roots/test-ext-autosummary-skip-member/index.rst new file mode 100644 index 0000000..c376a13 --- /dev/null +++ b/tests/roots/test-ext-autosummary-skip-member/index.rst @@ -0,0 +1,4 @@ +.. 
autosummary:: + :toctree: generate + + target.Foo diff --git a/tests/roots/test-ext-autosummary-skip-member/target.py b/tests/roots/test-ext-autosummary-skip-member/target.py new file mode 100644 index 0000000..fdf557e --- /dev/null +++ b/tests/roots/test-ext-autosummary-skip-member/target.py @@ -0,0 +1,14 @@ +class Foo: + """docstring of Foo.""" + + def meth(self): + """docstring of meth.""" + pass + + def skipmeth(self): + """docstring of skipmeth.""" + pass + + def _privatemeth(self): + """docstring of _privatemeth.""" + pass diff --git a/tests/roots/test-ext-autosummary-template/_templates/empty.rst b/tests/roots/test-ext-autosummary-template/_templates/empty.rst new file mode 100644 index 0000000..7f7204c --- /dev/null +++ b/tests/roots/test-ext-autosummary-template/_templates/empty.rst @@ -0,0 +1 @@ +EMPTY diff --git a/tests/roots/test-ext-autosummary-template/conf.py b/tests/roots/test-ext-autosummary-template/conf.py new file mode 100644 index 0000000..cc23635 --- /dev/null +++ b/tests/roots/test-ext-autosummary-template/conf.py @@ -0,0 +1,10 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + + +extensions = ['sphinx.ext.autosummary'] +autosummary_generate = True +autodoc_default_options = {'members': True} +templates_path = ['_templates'] diff --git a/tests/roots/test-ext-autosummary-template/index.rst b/tests/roots/test-ext-autosummary-template/index.rst new file mode 100644 index 0000000..c9f28b0 --- /dev/null +++ b/tests/roots/test-ext-autosummary-template/index.rst @@ -0,0 +1,5 @@ +.. autosummary:: + :toctree: generate + :template: empty.rst + + target.Foo diff --git a/tests/roots/test-ext-autosummary-template/target.py b/tests/roots/test-ext-autosummary-template/target.py new file mode 100644 index 0000000..c607b59 --- /dev/null +++ b/tests/roots/test-ext-autosummary-template/target.py @@ -0,0 +1,2 @@ +class Foo: + """docstring of Foo.""" diff --git a/tests/roots/test-ext-autosummary/autosummary_class_module.py b/tests/roots/test-ext-autosummary/autosummary_class_module.py new file mode 100644 index 0000000..f13de17 --- /dev/null +++ b/tests/roots/test-ext-autosummary/autosummary_class_module.py @@ -0,0 +1,2 @@ +class Class(): + pass diff --git a/tests/roots/test-ext-autosummary/autosummary_dummy_inherited_module.py b/tests/roots/test-ext-autosummary/autosummary_dummy_inherited_module.py new file mode 100644 index 0000000..2b3d2da --- /dev/null +++ b/tests/roots/test-ext-autosummary/autosummary_dummy_inherited_module.py @@ -0,0 +1,13 @@ +from autosummary_dummy_module import Foo + + +class InheritedAttrClass(Foo): + + def __init__(self): + #: other docstring + self.subclassattr = "subclassattr" + + super().__init__() + + +__all__ = ["InheritedAttrClass"] diff --git a/tests/roots/test-ext-autosummary/autosummary_dummy_module.py b/tests/roots/test-ext-autosummary/autosummary_dummy_module.py new file mode 100644 index 0000000..2d8829a --- /dev/null +++ b/tests/roots/test-ext-autosummary/autosummary_dummy_module.py @@ -0,0 +1,68 @@ +from os import path +from typing import Union + +from autosummary_class_module import Class + +__all__ = [ + "CONSTANT1", + "Exc", + "Foo", + "_Baz", + "bar", + "qux", + "path", +] + +#: module variable +CONSTANT1 = None +CONSTANT2 = None + + +class Foo: + #: class variable + CONSTANT3 = None + CONSTANT4 = None + + class Bar: + pass + + def __init__(self): + #: docstring + self.value = 1 + + def bar(self): + pass + + @property + def baz(self): + pass + + +class _Baz: + pass + + +def bar(x: Union[int, str], y: int = 1) -> None: + 
pass + + +def _quux(): + pass + + +class Exc(Exception): + pass + + +class _Exc(Exception): + pass + + +#: a module-level attribute +qux = 2 +#: a module-level attribute that has been excluded from __all__ +quuz = 2 + +considered_as_imported = Class() +non_imported_member = Class() +""" This attribute has a docstring, so it is recognized as a not-imported member """ diff --git a/tests/roots/test-ext-autosummary/autosummary_importfail.py b/tests/roots/test-ext-autosummary/autosummary_importfail.py new file mode 100644 index 0000000..5c6ce56 --- /dev/null +++ b/tests/roots/test-ext-autosummary/autosummary_importfail.py @@ -0,0 +1,2 @@ +# Fail module import in a catastrophic way +raise SystemExit(1) diff --git a/tests/roots/test-ext-autosummary/conf.py b/tests/roots/test-ext-autosummary/conf.py new file mode 100644 index 0000000..55c769c --- /dev/null +++ b/tests/roots/test-ext-autosummary/conf.py @@ -0,0 +1,10 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + +extensions = ['sphinx.ext.autosummary'] +autosummary_generate = True + +# The suffix of source filenames. +source_suffix = '.rst' diff --git a/tests/roots/test-ext-autosummary/index.rst b/tests/roots/test-ext-autosummary/index.rst new file mode 100644 index 0000000..08bd0f0 --- /dev/null +++ b/tests/roots/test-ext-autosummary/index.rst @@ -0,0 +1,18 @@ + +:autolink:`autosummary_dummy_module.Foo` + +:autolink:`autosummary_importfail` + +.. autosummary:: + :toctree: generated + :caption: An autosummary + + autosummary_dummy_module + autosummary_dummy_module.Foo + autosummary_dummy_module.Foo.Bar + autosummary_dummy_module.Foo.value + autosummary_dummy_module.bar + autosummary_dummy_module.qux + autosummary_dummy_inherited_module.InheritedAttrClass + autosummary_dummy_inherited_module.InheritedAttrClass.subclassattr + autosummary_importfail diff --git a/tests/roots/test-ext-coverage/conf.py b/tests/roots/test-ext-coverage/conf.py new file mode 100644 index 0000000..d3ec6e8 --- /dev/null +++ b/tests/roots/test-ext-coverage/conf.py @@ -0,0 +1,12 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage'] + +coverage_ignore_pyobjects = [ + r'^coverage_ignored(\..*)?$', + r'\.Ignored$', + r'\.Documented\.ignored\d$', +] diff --git a/tests/roots/test-ext-coverage/coverage_ignored.py b/tests/roots/test-ext-coverage/coverage_ignored.py new file mode 100644 index 0000000..b762955 --- /dev/null +++ b/tests/roots/test-ext-coverage/coverage_ignored.py @@ -0,0 +1,22 @@ +class Documented: + """Documented""" + + def ignored1(self): + pass + + def ignored2(self): + pass + + def not_ignored1(self): + pass + + def not_ignored2(self): + pass + + +class Ignored: + pass + + +class NotIgnored: + pass diff --git a/tests/roots/test-ext-coverage/coverage_not_ignored.py b/tests/roots/test-ext-coverage/coverage_not_ignored.py new file mode 100644 index 0000000..b762955 --- /dev/null +++ b/tests/roots/test-ext-coverage/coverage_not_ignored.py @@ -0,0 +1,22 @@ +class Documented: + """Documented""" + + def ignored1(self): + pass + + def ignored2(self): + pass + + def not_ignored1(self): + pass + + def not_ignored2(self): + pass + + +class Ignored: + pass + + +class NotIgnored: + pass diff --git a/tests/roots/test-ext-coverage/index.rst b/tests/roots/test-ext-coverage/index.rst new file mode 100644 index 0000000..b846898 --- /dev/null +++ b/tests/roots/test-ext-coverage/index.rst @@ -0,0 +1,6 @@ +.. automodule:: coverage_ignored + :members: + + +.. 
automodule:: coverage_not_ignored + :members: diff --git a/tests/roots/test-ext-doctest-skipif/conf.py b/tests/roots/test-ext-doctest-skipif/conf.py new file mode 100644 index 0000000..6f54982 --- /dev/null +++ b/tests/roots/test-ext-doctest-skipif/conf.py @@ -0,0 +1,16 @@ +extensions = ['sphinx.ext.doctest'] + +project = 'test project for the doctest :skipif: directive' +root_doc = 'skipif' +source_suffix = '.txt' +exclude_patterns = ['_build'] + +doctest_global_setup = ''' +from tests.test_ext_doctest import record + +record('doctest_global_setup', 'body', True) +''' + +doctest_global_cleanup = ''' +record('doctest_global_cleanup', 'body', True) +''' diff --git a/tests/roots/test-ext-doctest-skipif/skipif.txt b/tests/roots/test-ext-doctest-skipif/skipif.txt new file mode 100644 index 0000000..c5bd398 --- /dev/null +++ b/tests/roots/test-ext-doctest-skipif/skipif.txt @@ -0,0 +1,81 @@ +Testing the doctest extension's `:skipif:` option +================================================= + +testsetup +--------- + +.. testsetup:: group-skipif + :skipif: record('testsetup', ':skipif:', True) != 'this will be True' + + record('testsetup', 'body', True) + +.. testsetup:: group-skipif + :skipif: record('testsetup', ':skipif:', False) == 'this will be False' + + record('testsetup', 'body', False) + + +doctest +------- +.. doctest:: group-skipif + :skipif: record('doctest', ':skipif:', True) != 'this will be True' + + >>> print(record('doctest', 'body', True)) + The test is skipped, and this expected text is ignored + + +.. doctest:: + :skipif: record('doctest', ':skipif:', False) == 'this will be False' + + >>> print(record('doctest', 'body', False)) + Recorded doctest body False + + +testcode and testoutput +----------------------- + +testcode skipped +~~~~~~~~~~~~~~~~ + +.. testcode:: group-skipif + :skipif: record('testcode', ':skipif:', True) != 'this will be True' + + print(record('testcode', 'body', True)) + +.. testoutput:: group-skipif + :skipif: record('testoutput-1', ':skipif:', True) != 'this will be True' + + The previous testcode is skipped, and the :skipif: condition is True, + so this testoutput is ignored + +testcode executed +~~~~~~~~~~~~~~~~~ + +.. testcode:: group-skipif + :skipif: record('testcode', ':skipif:', False) == 'this will be False' + + print(record('testcode', 'body', False)) + +.. testoutput:: group-skipif + :skipif: record('testoutput-2', ':skipif:', False) == 'this will be False' + + Recorded testcode body False + +.. testoutput:: group-skipif + :skipif: record('testoutput-2', ':skipif:', True) != 'this will be True' + + The :skipif: condition is False, so this testoutput is ignored + + +testcleanup +----------- + +.. testcleanup:: group-skipif + :skipif: record('testcleanup', ':skipif:', True) != 'this will be True' + + record('testcleanup', 'body', True) + +.. 
testcleanup:: group-skipif + :skipif: record('testcleanup', ':skipif:', False) == 'this will be False' + + record('testcleanup', 'body', False) diff --git a/tests/roots/test-ext-doctest-with-autodoc/conf.py b/tests/roots/test-ext-doctest-with-autodoc/conf.py new file mode 100644 index 0000000..1ec1dd9 --- /dev/null +++ b/tests/roots/test-ext-doctest-with-autodoc/conf.py @@ -0,0 +1,7 @@ +import sys +from os import path + +sys.path.insert(0, path.abspath(path.dirname(__file__))) + +project = 'test project for doctest + autodoc reporting' +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest'] diff --git a/tests/roots/test-ext-doctest-with-autodoc/dir/__init__.py b/tests/roots/test-ext-doctest-with-autodoc/dir/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-ext-doctest-with-autodoc/dir/__init__.py diff --git a/tests/roots/test-ext-doctest-with-autodoc/dir/bar.py b/tests/roots/test-ext-doctest-with-autodoc/dir/bar.py new file mode 100644 index 0000000..122fdf7 --- /dev/null +++ b/tests/roots/test-ext-doctest-with-autodoc/dir/bar.py @@ -0,0 +1,4 @@ +""" +>>> 'dir/bar.py:2' + +""" diff --git a/tests/roots/test-ext-doctest-with-autodoc/dir/inner.rst b/tests/roots/test-ext-doctest-with-autodoc/dir/inner.rst new file mode 100644 index 0000000..b2ee47f --- /dev/null +++ b/tests/roots/test-ext-doctest-with-autodoc/dir/inner.rst @@ -0,0 +1,4 @@ +>>> 'dir/inner.rst:1' + +.. automodule:: dir.bar + :members: diff --git a/tests/roots/test-ext-doctest-with-autodoc/foo.py b/tests/roots/test-ext-doctest-with-autodoc/foo.py new file mode 100644 index 0000000..9f62a19 --- /dev/null +++ b/tests/roots/test-ext-doctest-with-autodoc/foo.py @@ -0,0 +1,5 @@ +""" + +>>> 'foo.py:3' + +""" diff --git a/tests/roots/test-ext-doctest-with-autodoc/index.rst b/tests/roots/test-ext-doctest-with-autodoc/index.rst new file mode 100644 index 0000000..09d1239 --- /dev/null +++ b/tests/roots/test-ext-doctest-with-autodoc/index.rst @@ -0,0 +1,4 @@ +.. automodule:: foo + :members: + +>>> 'index.rst:4' diff --git a/tests/roots/test-ext-doctest/conf.py b/tests/roots/test-ext-doctest/conf.py new file mode 100644 index 0000000..d0e8b10 --- /dev/null +++ b/tests/roots/test-ext-doctest/conf.py @@ -0,0 +1,6 @@ +extensions = ['sphinx.ext.doctest'] + +project = 'test project for doctest' +root_doc = 'doctest' +source_suffix = '.txt' +exclude_patterns = ['_build'] diff --git a/tests/roots/test-ext-doctest/doctest.txt b/tests/roots/test-ext-doctest/doctest.txt new file mode 100644 index 0000000..04780cf --- /dev/null +++ b/tests/roots/test-ext-doctest/doctest.txt @@ -0,0 +1,163 @@ +Testing the doctest extension +============================= + +Simple doctest blocks +--------------------- + +>>> 1+1 +2 +>>> 1/0 +Traceback (most recent call last): + ... +ZeroDivisionError: integer division or modulo by zero + + +Special directives +------------------ + +* doctest + + .. doctest:: + + >>> 1+1 + 2 + >>> 1/0 + Traceback (most recent call last): + ... + ZeroDivisionError: integer division or modulo by zero + +* testcode/testoutput + + .. testcode:: + + print(1+1) + + .. testoutput:: + + 2 + + .. testcode:: + + 1/0 + + .. testoutput:: + + Traceback (most recent call last): + ... + ZeroDivisionError: integer division or modulo by zero + +* testsetup + + .. testsetup:: * + + def squared(x): + return x * x + + .. doctest:: + + >>> squared(2) + 4 + + .. testcode:: + + print(squared(2)) + + .. testoutput:: + + 4 + + >>> squared(2) + 4 + +* options for doctest/testcode/testoutput blocks + + .. 
testcode:: + :hide: + + print('Output text.') + + .. testoutput:: + :hide: + :options: +NORMALIZE_WHITESPACE + + Output text. + + .. doctest:: + :pyversion: >= 2.0 + + >>> a = 3 + >>> a + 3 + + .. doctest:: + :pyversion: < 2.0 + + >>> a = 3 + >>> a + 4 + +* grouping + + .. testsetup:: group1 + + def add(x, y): + return x + y + + + ``add`` is now known in "group1", but not in others. + + .. doctest:: group1 + + >>> add(1, 1) + 2 + + .. doctest:: group2 + + >>> add(1, 1) + Traceback (most recent call last): + ... + NameError: name 'add' is not defined + + Interleaving testcode/testoutput: + + .. testcode:: group1 + + print(squared(3)) + + .. testcode:: group2 + + print(squared(4)) + + .. testoutput:: group1 + + 9 + + .. testoutput:: group2 + + 16 + + +.. testcleanup:: * + + from tests import test_ext_doctest + test_ext_doctest.cleanup_call() + +non-ASCII result +---------------- + +>>> print('umlauts: äöü.') +umlauts: äöü. +>>> print('Japanese: 日本語') +Japanese: 日本語 + +keep control char in raw string +------------------------------- + +.. doctest:: + + >>> print('one\ntwo') + one + two + >>> print(r'one\ntwo') + one\ntwo + diff --git a/tests/roots/test-ext-extlinks-hardcoded-urls-multiple-replacements/conf.py b/tests/roots/test-ext-extlinks-hardcoded-urls-multiple-replacements/conf.py new file mode 100644 index 0000000..f463449 --- /dev/null +++ b/tests/roots/test-ext-extlinks-hardcoded-urls-multiple-replacements/conf.py @@ -0,0 +1,6 @@ +extensions = ['sphinx.ext.extlinks'] +extlinks = { + 'user': ('https://github.com/%s', '@%s'), + 'repo': ('https://github.com/%s', 'project %s'), +} +extlinks_detect_hardcoded_links = True diff --git a/tests/roots/test-ext-extlinks-hardcoded-urls-multiple-replacements/index.rst b/tests/roots/test-ext-extlinks-hardcoded-urls-multiple-replacements/index.rst new file mode 100644 index 0000000..162b361 --- /dev/null +++ b/tests/roots/test-ext-extlinks-hardcoded-urls-multiple-replacements/index.rst @@ -0,0 +1,24 @@ +test-ext-extlinks-hardcoded-urls +================================ + +.. Links generated by extlinks extension should not raise any warnings. +.. Only hardcoded URLs are affected. + +:user:`octocat` + +:repo:`sphinx-doc/sphinx` + +.. hardcoded replaceable link which can be replaced as +.. :repo:`octocat` or :user:`octocat` + +https://github.com/octocat + +`inline replaceable link <https://github.com/octocat>`_ + +`replaceable link`_ + +`non replaceable link <https://github.com/sphinx-doc/sphinx/pulls>`_ + +.. hyperlinks + +.. _replaceable link: https://github.com/octocat diff --git a/tests/roots/test-ext-extlinks-hardcoded-urls/conf.py b/tests/roots/test-ext-extlinks-hardcoded-urls/conf.py new file mode 100644 index 0000000..db0b341 --- /dev/null +++ b/tests/roots/test-ext-extlinks-hardcoded-urls/conf.py @@ -0,0 +1,3 @@ +extensions = ['sphinx.ext.extlinks'] +extlinks = {'issue': ('https://github.com/sphinx-doc/sphinx/issues/%s', 'issue %s')} +extlinks_detect_hardcoded_links = True diff --git a/tests/roots/test-ext-extlinks-hardcoded-urls/index.rst b/tests/roots/test-ext-extlinks-hardcoded-urls/index.rst new file mode 100644 index 0000000..ada6f07 --- /dev/null +++ b/tests/roots/test-ext-extlinks-hardcoded-urls/index.rst @@ -0,0 +1,28 @@ +test-ext-extlinks-hardcoded-urls +================================ + +.. Links generated by extlinks extension should not raise any warnings. +.. Only hardcoded URLs are affected. + +:issue:`1` + +.. 
hardcoded replaceable link + +https://github.com/sphinx-doc/sphinx/issues/1 + +`inline replaceable link <https://github.com/sphinx-doc/sphinx/issues/1>`_ + +`replaceable link`_ + +.. hardcoded non-replaceable link + +https://github.com/sphinx-doc/sphinx/pulls/1 + +`inline non-replaceable link <https://github.com/sphinx-doc/sphinx/pulls/1>`_ + +`non-replaceable link`_ + +.. hyperlinks + +.. _replaceable link: https://github.com/sphinx-doc/sphinx/issues/1 +.. _non-replaceable link: https://github.com/sphinx-doc/sphinx/pulls/1 diff --git a/tests/roots/test-ext-githubpages/conf.py b/tests/roots/test-ext-githubpages/conf.py new file mode 100644 index 0000000..3577096 --- /dev/null +++ b/tests/roots/test-ext-githubpages/conf.py @@ -0,0 +1 @@ +extensions = ['sphinx.ext.githubpages'] diff --git a/tests/roots/test-ext-githubpages/index.rst b/tests/roots/test-ext-githubpages/index.rst new file mode 100644 index 0000000..711847f --- /dev/null +++ b/tests/roots/test-ext-githubpages/index.rst @@ -0,0 +1,3 @@ +githubpages +=========== + diff --git a/tests/roots/test-ext-graphviz/_static/images/test.svg b/tests/roots/test-ext-graphviz/_static/images/test.svg new file mode 100644 index 0000000..6134f44 --- /dev/null +++ b/tests/roots/test-ext-graphviz/_static/images/test.svg @@ -0,0 +1,8 @@ +<?xml version="1.0" encoding="UTF-8"?> +<svg version="1.1" + height="128" width="128" + xmlns="http://www.w3.org/2000/svg"> + + <rect width="100%" height="100%" fill="red" /> + +</svg> diff --git a/tests/roots/test-ext-graphviz/conf.py b/tests/roots/test-ext-graphviz/conf.py new file mode 100644 index 0000000..317457f --- /dev/null +++ b/tests/roots/test-ext-graphviz/conf.py @@ -0,0 +1,3 @@ +extensions = ['sphinx.ext.graphviz'] +exclude_patterns = ['_build'] +html_static_path = ["_static"] diff --git a/tests/roots/test-ext-graphviz/graph.dot b/tests/roots/test-ext-graphviz/graph.dot new file mode 100644 index 0000000..ca57244 --- /dev/null +++ b/tests/roots/test-ext-graphviz/graph.dot @@ -0,0 +1,3 @@ +digraph { + bar -> baz +} diff --git a/tests/roots/test-ext-graphviz/graph.xx.dot b/tests/roots/test-ext-graphviz/graph.xx.dot new file mode 100644 index 0000000..e5add5c --- /dev/null +++ b/tests/roots/test-ext-graphviz/graph.xx.dot @@ -0,0 +1,3 @@ +digraph { + BAR -> BAZ +} diff --git a/tests/roots/test-ext-graphviz/index.rst b/tests/roots/test-ext-graphviz/index.rst new file mode 100644 index 0000000..cb0f069 --- /dev/null +++ b/tests/roots/test-ext-graphviz/index.rst @@ -0,0 +1,43 @@ +graphviz +======== + +.. digraph:: foo + :caption: caption of graph + + bar -> baz + +.. |graph| digraph:: bar + + bar -> baz + +Hello |graph| graphviz world + +.. digraph:: foo + :graphviz_dot: neato + :class: neato_graph + + baz -> qux + + +.. graphviz:: graph.dot + +.. digraph:: bar + :align: right + :caption: on *right* + + foo -> bar + +.. digraph:: foo + :align: center + + centered + +.. 
graphviz:: + :align: center + + digraph test { + foo [label="foo", URL="#graphviz", target="_parent"] + bar [label="bar", image="./_static/images/test.svg"] + baz [label="baz", URL="./_static/images/test.svg"] + foo -> bar -> baz + } diff --git a/tests/roots/test-ext-ifconfig/conf.py b/tests/roots/test-ext-ifconfig/conf.py new file mode 100644 index 0000000..e82ec79 --- /dev/null +++ b/tests/roots/test-ext-ifconfig/conf.py @@ -0,0 +1,10 @@ +extensions = ['sphinx.ext.ifconfig'] +exclude_patterns = ['_build'] + +confval1 = True + + +def setup(app): + app.add_config_value('confval1', False, None) + app.add_config_value('confval2', False, None) + app.add_config_value('false_config', False, None) diff --git a/tests/roots/test-ext-ifconfig/index.rst b/tests/roots/test-ext-ifconfig/index.rst new file mode 100644 index 0000000..f7fabcc --- /dev/null +++ b/tests/roots/test-ext-ifconfig/index.rst @@ -0,0 +1,21 @@ +ifconfig +======== + +.. ifconfig:: confval1 + + spam + +.. ifconfig:: confval2 + + egg + +Issue 10496 regression test +=========================== + +.. ifconfig:: false_config + + `Link 1 <https://link1.example>`__ + +.. ifconfig:: false_config + + `Link 2 <https://link2.example>`__ diff --git a/tests/roots/test-ext-imgconverter/conf.py b/tests/roots/test-ext-imgconverter/conf.py new file mode 100644 index 0000000..6a2e75d --- /dev/null +++ b/tests/roots/test-ext-imgconverter/conf.py @@ -0,0 +1 @@ +extensions = ['sphinx.ext.imgconverter'] diff --git a/tests/roots/test-ext-imgconverter/img.pdf b/tests/roots/test-ext-imgconverter/img.pdf Binary files differnew file mode 100644 index 0000000..cacbd85 --- /dev/null +++ b/tests/roots/test-ext-imgconverter/img.pdf diff --git a/tests/roots/test-ext-imgconverter/index.rst b/tests/roots/test-ext-imgconverter/index.rst new file mode 100644 index 0000000..f8ef1d6 --- /dev/null +++ b/tests/roots/test-ext-imgconverter/index.rst @@ -0,0 +1,5 @@ +test-ext-imgconverter +===================== + +.. image:: svgimg.svg +.. 
image:: img.pdf diff --git a/tests/roots/test-ext-imgconverter/svgimg.svg b/tests/roots/test-ext-imgconverter/svgimg.svg new file mode 100644 index 0000000..2bae0b9 --- /dev/null +++ b/tests/roots/test-ext-imgconverter/svgimg.svg @@ -0,0 +1,4 @@ +<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="60" width="60"> + <circle cx="40" cy="40" r="24" style="stroke:#000000; fill:#ffffff"/> +</svg> diff --git a/tests/roots/test-ext-imgmockconverter/1/svgimg.svg b/tests/roots/test-ext-imgmockconverter/1/svgimg.svg new file mode 100644 index 0000000..981e301 --- /dev/null +++ b/tests/roots/test-ext-imgmockconverter/1/svgimg.svg @@ -0,0 +1,3 @@ +<svg viewBox="0 0 100 100" xmlns="http://www.w3.org/2000/svg"> + <circle cx="50" cy="50" r="50" /> +</svg> diff --git a/tests/roots/test-ext-imgmockconverter/2/svgimg.svg b/tests/roots/test-ext-imgmockconverter/2/svgimg.svg new file mode 100644 index 0000000..2bae0b9 --- /dev/null +++ b/tests/roots/test-ext-imgmockconverter/2/svgimg.svg @@ -0,0 +1,4 @@ +<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="60" width="60"> + <circle cx="40" cy="40" r="24" style="stroke:#000000; fill:#ffffff"/> +</svg> diff --git a/tests/roots/test-ext-imgmockconverter/conf.py b/tests/roots/test-ext-imgmockconverter/conf.py new file mode 100644 index 0000000..679bb5a --- /dev/null +++ b/tests/roots/test-ext-imgmockconverter/conf.py @@ -0,0 +1,5 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) +extensions = ['mocksvgconverter'] diff --git a/tests/roots/test-ext-imgmockconverter/index.rst b/tests/roots/test-ext-imgmockconverter/index.rst new file mode 100644 index 0000000..bc665f6 --- /dev/null +++ b/tests/roots/test-ext-imgmockconverter/index.rst @@ -0,0 +1,6 @@ +test-ext-imgconverter +===================== + +.. image:: ./1/svgimg.svg +.. image:: ./2/svgimg.svg + diff --git a/tests/roots/test-ext-imgmockconverter/mocksvgconverter.py b/tests/roots/test-ext-imgmockconverter/mocksvgconverter.py new file mode 100644 index 0000000..43368de --- /dev/null +++ b/tests/roots/test-ext-imgmockconverter/mocksvgconverter.py @@ -0,0 +1,39 @@ +""" + Does foo.svg --> foo.pdf with no change to the file. 
+""" + +import shutil + +from sphinx.transforms.post_transforms.images import ImageConverter + +if False: + # For type annotation + from typing import Any, Dict # NOQA + + from sphinx.application import Sphinx # NOQA + +class MyConverter(ImageConverter): + conversion_rules = [ + ('image/svg+xml', 'application/pdf'), + ] + + def is_available(self): + # type: () -> bool + return True + + def convert(self, _from, _to): + # type: (unicode, unicode) -> bool + """Mock converts the image from SVG to PDF.""" + shutil.copyfile(_from, _to) + return True + + +def setup(app): + # type: (Sphinx) -> Dict[unicode, Any] + app.add_post_transform(MyConverter) + + return { + 'version': 'builtin', + 'parallel_read_safe': True, + 'parallel_write_safe': True, + } diff --git a/tests/roots/test-ext-inheritance_diagram/conf.py b/tests/roots/test-ext-inheritance_diagram/conf.py new file mode 100644 index 0000000..d3778d5 --- /dev/null +++ b/tests/roots/test-ext-inheritance_diagram/conf.py @@ -0,0 +1,6 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + +extensions = ['sphinx.ext.inheritance_diagram', 'sphinx.ext.intersphinx'] diff --git a/tests/roots/test-ext-inheritance_diagram/example/__init__.py b/tests/roots/test-ext-inheritance_diagram/example/__init__.py new file mode 100644 index 0000000..2f85c08 --- /dev/null +++ b/tests/roots/test-ext-inheritance_diagram/example/__init__.py @@ -0,0 +1 @@ +# example.py diff --git a/tests/roots/test-ext-inheritance_diagram/example/sphinx.py b/tests/roots/test-ext-inheritance_diagram/example/sphinx.py new file mode 100644 index 0000000..2bfbf4c --- /dev/null +++ b/tests/roots/test-ext-inheritance_diagram/example/sphinx.py @@ -0,0 +1,5 @@ +# example.sphinx + + +class DummyClass: + pass diff --git a/tests/roots/test-ext-inheritance_diagram/external/other.py b/tests/roots/test-ext-inheritance_diagram/external/other.py new file mode 100644 index 0000000..a4ce8a7 --- /dev/null +++ b/tests/roots/test-ext-inheritance_diagram/external/other.py @@ -0,0 +1,5 @@ +from test import Alice + + +class Bob(Alice): + pass diff --git a/tests/roots/test-ext-inheritance_diagram/index.rst b/tests/roots/test-ext-inheritance_diagram/index.rst new file mode 100644 index 0000000..e694fb0 --- /dev/null +++ b/tests/roots/test-ext-inheritance_diagram/index.rst @@ -0,0 +1,18 @@ +============================ +test-ext-inheritance_diagram +============================ + +.. inheritance-diagram:: test.Foo + +.. inheritance-diagram:: test.Foo + :caption: Test Foo! + +.. inheritance-diagram:: test.DocSubDir2 + +.. py:class:: test.DocHere + +.. py:class:: test.DocMainLevel + +.. inheritance-diagram:: external.other.Bob + +.. py:class:: test.Alice diff --git a/tests/roots/test-ext-inheritance_diagram/subdir/page1.rst b/tests/roots/test-ext-inheritance_diagram/subdir/page1.rst new file mode 100644 index 0000000..3001b02 --- /dev/null +++ b/tests/roots/test-ext-inheritance_diagram/subdir/page1.rst @@ -0,0 +1,9 @@ +================================================ +test-ext-inheritance_diagram subdirectory page 1 +================================================ + +.. inheritance-diagram:: test.DocMainLevel + +.. inheritance-diagram:: test.DocSubDir2 + +.. 
py:class:: test.DocSubDir1 diff --git a/tests/roots/test-ext-inheritance_diagram/subdir/page2.rst b/tests/roots/test-ext-inheritance_diagram/subdir/page2.rst new file mode 100644 index 0000000..720e2d8 --- /dev/null +++ b/tests/roots/test-ext-inheritance_diagram/subdir/page2.rst @@ -0,0 +1,5 @@ +================================================ +test-ext-inheritance_diagram subdirectory page 2 +================================================ + +.. py:class:: test.DocSubDir2 diff --git a/tests/roots/test-ext-inheritance_diagram/test.py b/tests/roots/test-ext-inheritance_diagram/test.py new file mode 100644 index 0000000..efb1c2a --- /dev/null +++ b/tests/roots/test-ext-inheritance_diagram/test.py @@ -0,0 +1,22 @@ +class Foo: + pass + + +class DocHere(Foo): + pass + + +class DocSubDir1(DocHere): + pass + + +class DocSubDir2(DocSubDir1): + pass + + +class DocMainLevel(Foo): + pass + + +class Alice(object): + pass diff --git a/tests/roots/test-ext-intersphinx-cppdomain/conf.py b/tests/roots/test-ext-intersphinx-cppdomain/conf.py new file mode 100644 index 0000000..9485eb2 --- /dev/null +++ b/tests/roots/test-ext-intersphinx-cppdomain/conf.py @@ -0,0 +1 @@ +extensions = ['sphinx.ext.intersphinx'] diff --git a/tests/roots/test-ext-intersphinx-cppdomain/index.rst b/tests/roots/test-ext-intersphinx-cppdomain/index.rst new file mode 100644 index 0000000..bf67d52 --- /dev/null +++ b/tests/roots/test-ext-intersphinx-cppdomain/index.rst @@ -0,0 +1,8 @@ +test-ext-intersphinx-cppdomain +============================== + +.. cpp:namespace:: foo + +:cpp:class:`Bar` + +.. cpp:function:: foons::bartype FooBarBaz() diff --git a/tests/roots/test-ext-intersphinx-role/conf.py b/tests/roots/test-ext-intersphinx-role/conf.py new file mode 100644 index 0000000..a54f5c2 --- /dev/null +++ b/tests/roots/test-ext-intersphinx-role/conf.py @@ -0,0 +1,3 @@ +extensions = ['sphinx.ext.intersphinx'] +# the role should not honor this conf var +intersphinx_disabled_reftypes = ['*'] diff --git a/tests/roots/test-ext-intersphinx-role/index.rst b/tests/roots/test-ext-intersphinx-role/index.rst new file mode 100644 index 0000000..58edb7a --- /dev/null +++ b/tests/roots/test-ext-intersphinx-role/index.rst @@ -0,0 +1,44 @@ +- ``module1`` is only defined in ``inv``: + :external:py:mod:`module1` + +.. py:module:: module2 + +- ``module2`` is defined here and also in ``inv``, but should resolve to inv: + :external:py:mod:`module2` + +- ``module3`` is not defined anywhere, so should warn: + :external:py:mod:`module3` + +.. py:module:: module10 + +- ``module10`` is only defined here, but should still not be resolved to: + :external:py:mod:`module10` + +- a function in inv: + :external:py:func:`module1.func` +- a method, but with old style inventory prefix, which shouldn't work: + :external:py:meth:`inv:Foo.bar` +- a non-existing role: + :external:py:nope:`something` + +.. 
default-domain:: cpp + +- a type where the default domain is used to find the role: + :external:type:`std::uint8_t` +- a non-existing role in default domain: + :external:nope:`somethingElse` + +- two roles in ``std`` which can be found without a default domain: + + - :external:doc:`docname` + - :external:option:`ls -l` + + +- a function with explicit inventory: + :external+inv:c:func:`CFunc` +- a class with explicit non-existing inventory, which also has upper-case in name: + :external+invNope:cpp:class:`foo::Bar` + + +- explicit title: + :external:cpp:type:`FoonsTitle <foons>` diff --git a/tests/roots/test-ext-math-compat/conf.py b/tests/roots/test-ext-math-compat/conf.py new file mode 100644 index 0000000..85e3950 --- /dev/null +++ b/tests/roots/test-ext-math-compat/conf.py @@ -0,0 +1,20 @@ +from docutils import nodes +from docutils.parsers.rst import Directive + +extensions = ['sphinx.ext.mathjax'] + + +def my_math_role(role, rawtext, text, lineno, inliner, options={}, content=[]): + text = 'E = mc^2' + return [nodes.math(text, text)], [] + + +class MyMathDirective(Directive): + def run(self): + text = 'E = mc^2' + return [nodes.math_block(text, text)] + + +def setup(app): + app.add_role('my_math', my_math_role) + app.add_directive('my-math', MyMathDirective) diff --git a/tests/roots/test-ext-math-compat/index.rst b/tests/roots/test-ext-math-compat/index.rst new file mode 100644 index 0000000..208878c --- /dev/null +++ b/tests/roots/test-ext-math-compat/index.rst @@ -0,0 +1,21 @@ +Test Math +========= + +inline +------ + +Inline: :math:`E=mc^2` +Inline my math: :my_math:`:-)` + +block +----- + +.. math:: a^2+b^2=c^2 + +Second math + +.. math:: e^{i\pi}+1=0 + +Multi math equations + +.. my-math:: diff --git a/tests/roots/test-ext-math-simple/conf.py b/tests/roots/test-ext-math-simple/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-ext-math-simple/conf.py diff --git a/tests/roots/test-ext-math-simple/index.rst b/tests/roots/test-ext-math-simple/index.rst new file mode 100644 index 0000000..a455d89 --- /dev/null +++ b/tests/roots/test-ext-math-simple/index.rst @@ -0,0 +1,4 @@ +Test Math +========= + +.. math:: a^2+b^2=c^2 diff --git a/tests/roots/test-ext-math/conf.py b/tests/roots/test-ext-math/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-ext-math/conf.py diff --git a/tests/roots/test-ext-math/index.rst b/tests/roots/test-ext-math/index.rst new file mode 100644 index 0000000..221284a --- /dev/null +++ b/tests/roots/test-ext-math/index.rst @@ -0,0 +1,25 @@ +Test Math +========= + +.. toctree:: + :numbered: 1 + + math + page + nomath + +.. math:: a^2+b^2=c^2 + +Inline :math:`E=mc^2` + +Second math + +.. math:: e^{i\pi}+1=0 + +Multi math equations + +.. math:: + + S &= \pi r^2 + + V &= \frac{4}{3} \pi r^3 diff --git a/tests/roots/test-ext-math/math.rst b/tests/roots/test-ext-math/math.rst new file mode 100644 index 0000000..c05c3a0 --- /dev/null +++ b/tests/roots/test-ext-math/math.rst @@ -0,0 +1,31 @@ +Test math extensions :math:`E = m c^2` +====================================== + +This is inline math: :math:`a^2 + b^2 = c^2`. + +.. math:: a^2 + b^2 = c^2 + +.. math:: + + a + 1 < b + +.. math:: + :label: foo + + e^{i\pi} = 1 + +.. math:: + :label: + + e^{ix} = \cos x + i\sin x + +.. math:: + + n \in \mathbb N + +.. math:: + :nowrap: + + a + 1 < b + +Referencing equation :eq:`foo` and :math:numref:`foo`. 
diff --git a/tests/roots/test-ext-math/nomath.rst b/tests/roots/test-ext-math/nomath.rst new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-ext-math/nomath.rst diff --git a/tests/roots/test-ext-math/page.rst b/tests/roots/test-ext-math/page.rst new file mode 100644 index 0000000..ef80409 --- /dev/null +++ b/tests/roots/test-ext-math/page.rst @@ -0,0 +1,9 @@ +Test multiple pages +=================== + +.. math:: + :label: bar + + a = b + 1 + +Referencing equations :eq:`foo` and :eq:`bar`. diff --git a/tests/roots/test-ext-napoleon/conf.py b/tests/roots/test-ext-napoleon/conf.py new file mode 100644 index 0000000..502fb5a --- /dev/null +++ b/tests/roots/test-ext-napoleon/conf.py @@ -0,0 +1,5 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) +extensions = ['sphinx.ext.napoleon'] diff --git a/tests/roots/test-ext-napoleon/index.rst b/tests/roots/test-ext-napoleon/index.rst new file mode 100644 index 0000000..4c013b7 --- /dev/null +++ b/tests/roots/test-ext-napoleon/index.rst @@ -0,0 +1,6 @@ +test-ext-napoleon +================= + +.. toctree:: + + typehints diff --git a/tests/roots/test-ext-napoleon/mypackage/__init__.py b/tests/roots/test-ext-napoleon/mypackage/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-ext-napoleon/mypackage/__init__.py diff --git a/tests/roots/test-ext-napoleon/mypackage/typehints.py b/tests/roots/test-ext-napoleon/mypackage/typehints.py new file mode 100644 index 0000000..526b78e --- /dev/null +++ b/tests/roots/test-ext-napoleon/mypackage/typehints.py @@ -0,0 +1,11 @@ +def hello(x: int, *args: int, **kwargs: int) -> None: + """ + Parameters + ---------- + x + X + *args + Additional arguments. + **kwargs + Extra arguments. + """ diff --git a/tests/roots/test-ext-napoleon/typehints.rst b/tests/roots/test-ext-napoleon/typehints.rst new file mode 100644 index 0000000..43c61f6 --- /dev/null +++ b/tests/roots/test-ext-napoleon/typehints.rst @@ -0,0 +1,5 @@ +typehints +========= + +.. automodule:: mypackage.typehints + :members: diff --git a/tests/roots/test-ext-todo/bar.rst b/tests/roots/test-ext-todo/bar.rst new file mode 100644 index 0000000..6804a68 --- /dev/null +++ b/tests/roots/test-ext-todo/bar.rst @@ -0,0 +1,4 @@ +bar +=== + +.. todo:: todo in bar diff --git a/tests/roots/test-ext-todo/conf.py b/tests/roots/test-ext-todo/conf.py new file mode 100644 index 0000000..64b749e --- /dev/null +++ b/tests/roots/test-ext-todo/conf.py @@ -0,0 +1 @@ +extensions = ['sphinx.ext.todo'] diff --git a/tests/roots/test-ext-todo/foo.rst b/tests/roots/test-ext-todo/foo.rst new file mode 100644 index 0000000..12e9f63 --- /dev/null +++ b/tests/roots/test-ext-todo/foo.rst @@ -0,0 +1,10 @@ +foo +=== + +.. todo:: todo in foo + +.. py:function:: hello() + + :param bug: #5800 + + .. todo:: todo in param field diff --git a/tests/roots/test-ext-todo/index.rst b/tests/roots/test-ext-todo/index.rst new file mode 100644 index 0000000..781473d --- /dev/null +++ b/tests/roots/test-ext-todo/index.rst @@ -0,0 +1,11 @@ +test for sphinx.ext.todo +======================== + +.. toctree:: + + foo + bar + +.. todolist:: + +.. 
todolist:: diff --git a/tests/roots/test-ext-viewcode-find/conf.py b/tests/roots/test-ext-viewcode-find/conf.py new file mode 100644 index 0000000..18f97f4 --- /dev/null +++ b/tests/roots/test-ext-viewcode-find/conf.py @@ -0,0 +1,3 @@ +extensions = ['sphinx.ext.viewcode'] +exclude_patterns = ['_build'] +viewcode_follow_imported_members = False diff --git a/tests/roots/test-ext-viewcode-find/index.rst b/tests/roots/test-ext-viewcode-find/index.rst new file mode 100644 index 0000000..7eb416a --- /dev/null +++ b/tests/roots/test-ext-viewcode-find/index.rst @@ -0,0 +1,38 @@ +viewcode +======== + +.. py:module:: not_a_package + +.. py:function:: func1(a, b) + + This is func1 + +.. py:function:: not_a_package.submodule.func1(a, b) + + This is func1 + +.. py:module:: not_a_package.submodule + +.. py:class:: Class1 + + This is Class1 + +.. py:class:: Class3 + + This is Class3 + +.. py:class:: not_a_package.submodule.Class1 + + This is Class1 + +.. literalinclude:: not_a_package/__init__.py + :language: python + :pyobject: func1 + +.. literalinclude:: not_a_package/submodule.py + :language: python + :pyobject: func1 + +.. py:attribute:: not_a_package.submodule.Class3.class_attr + + This is the class attribute class_attr diff --git a/tests/roots/test-ext-viewcode-find/not_a_package/__init__.py b/tests/roots/test-ext-viewcode-find/not_a_package/__init__.py new file mode 100644 index 0000000..2382935 --- /dev/null +++ b/tests/roots/test-ext-viewcode-find/not_a_package/__init__.py @@ -0,0 +1 @@ +from .submodule import Class1, func1 diff --git a/tests/roots/test-ext-viewcode-find/not_a_package/submodule.py b/tests/roots/test-ext-viewcode-find/not_a_package/submodule.py new file mode 100644 index 0000000..ba8be78 --- /dev/null +++ b/tests/roots/test-ext-viewcode-find/not_a_package/submodule.py @@ -0,0 +1,31 @@ +""" +submodule +""" +raise RuntimeError('This module should not get imported') + + +def decorator(f): + return f + + +@decorator +def func1(a, b): + """ + this is func1 + """ + return a, b + + +@decorator +class Class1: + """ + this is Class1 + """ + + +class Class3: + """ + this is Class3 + """ + class_attr = 42 + """this is the class attribute class_attr""" diff --git a/tests/roots/test-ext-viewcode/conf.py b/tests/roots/test-ext-viewcode/conf.py new file mode 100644 index 0000000..5e07214 --- /dev/null +++ b/tests/roots/test-ext-viewcode/conf.py @@ -0,0 +1,24 @@ +import os +import sys + +source_dir = os.path.abspath('.') +if source_dir not in sys.path: + sys.path.insert(0, source_dir) +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] +exclude_patterns = ['_build'] + + +if 'test_linkcode' in tags: + extensions.remove('sphinx.ext.viewcode') + extensions.append('sphinx.ext.linkcode') + + def linkcode_resolve(domain, info): + if domain == 'py': + fn = info['module'].replace('.', '/') + return "http://foobar/source/%s.py" % fn + elif domain == "js": + return "http://foobar/js/" + info['fullname'] + elif domain in ("c", "cpp"): + return f"http://foobar/{domain}/{''.join(info['names'])}" + else: + raise AssertionError() diff --git a/tests/roots/test-ext-viewcode/index.rst b/tests/roots/test-ext-viewcode/index.rst new file mode 100644 index 0000000..e7956e7 --- /dev/null +++ b/tests/roots/test-ext-viewcode/index.rst @@ -0,0 +1,39 @@ +viewcode +======== + +.. py:module:: spam + +.. autofunction:: func1 + +.. autofunction:: func2 + +.. autofunction:: spam.mod1.func1 + +.. autofunction:: spam.mod2.func2 + +.. autofunction:: Class1 + +.. autofunction:: Class2 + +.. 
autofunction:: spam.mod1.Class1 + +.. autofunction:: spam.mod2.Class2 + + +.. literalinclude:: spam/__init__.py + :language: python + :pyobject: func1 + +.. literalinclude:: spam/mod1.py + :language: python + :pyobject: func1 + +.. autoclass:: spam.mod3.Class3 + :members: + +.. automodule:: spam.mod3 + :members: + +.. toctree:: + + objects diff --git a/tests/roots/test-ext-viewcode/objects.rst b/tests/roots/test-ext-viewcode/objects.rst new file mode 100644 index 0000000..114adbf --- /dev/null +++ b/tests/roots/test-ext-viewcode/objects.rst @@ -0,0 +1,169 @@ +Testing object descriptions +=========================== + +.. function:: func_without_module(a, b, *c[, d]) + + Does something. + +.. function:: func_without_body() + +.. function:: func_noindex + :no-index: + +.. function:: func_with_module + :module: foolib + +Referring to :func:`func with no index <func_noindex>`. +Referring to :func:`nothing <>`. + +.. module:: mod + :synopsis: Module synopsis. + :platform: UNIX + +.. function:: func_in_module + +.. class:: Cls + + .. method:: meth1 + + .. staticmethod:: meths + + .. attribute:: attr + +.. explicit class given +.. method:: Cls.meth2 + +.. explicit module given +.. exception:: Error(arg1, arg2) + :module: errmod + +.. data:: var + + +.. currentmodule:: None + +.. function:: func_without_module2() -> annotation + +.. object:: long(parameter, \ + list) + another one + +.. class:: TimeInt + + Has only one parameter (triggers special behavior...) + + :param moo: |test| + :type moo: |test| + +.. |test| replace:: Moo + +.. class:: Time(hour, minute, isdst) + + :param year: The year. + :type year: TimeInt + :param TimeInt minute: The minute. + :param isdst: whether it's DST + :type isdst: * some complex + * expression + :returns: a new :class:`Time` instance + :rtype: :class:`Time` + :raises ValueError: if the values are out of range + :ivar int hour: like *hour* + :ivar minute: like *minute* + :vartype minute: int + :param hour: Some parameter + :type hour: DuplicateType + :param hour: Duplicate param. Should not lead to crashes. + :type hour: DuplicateType + :param .Cls extcls: A class from another module. + + +C items +======= + +.. c:function:: Sphinx_DoSomething() + +.. c:member:: SphinxStruct.member + +.. c:macro:: SPHINX_USE_PYTHON + +.. c:type:: SphinxType + +.. c:var:: sphinx_global + + +Javascript items +================ + +.. js:function:: foo() + +.. js:data:: bar + +.. documenting the method of any object +.. js:function:: bar.baz(href, callback[, errback]) + + :param string href: The location of the resource. + :param callback: Gets called with the data returned by the resource. + :throws InvalidHref: If the `href` is invalid. + :returns: `undefined` + +.. js:attribute:: bar.spam + +References +========== + +Referencing :class:`mod.Cls` or :Class:`mod.Cls` should be the same. + +With target: :c:func:`Sphinx_DoSomething()` (parentheses are handled), +:c:member:`SphinxStruct.member`, :c:macro:`SPHINX_USE_PYTHON`, +:c:type:`SphinxType *` (pointer is handled), :c:data:`sphinx_global`. + +Without target: :c:func:`CFunction`. :c:func:`!malloc`. + +:js:func:`foo()` +:js:func:`foo` + +:js:data:`bar` +:js:func:`bar.baz()` +:js:func:`bar.baz` +:js:func:`~bar.baz()` + +:js:attr:`bar.baz` + + +Others +====== + +.. envvar:: HOME + +.. program:: python + +.. cmdoption:: -c command + +.. program:: perl + +.. cmdoption:: -c + +.. option:: +p + +Link to :option:`perl +p`. + + +User markup +=========== + +.. userdesc:: myobj:parameter + + Description of userdesc. 
+ + +Referencing :userdescrole:`myobj`. + + +CPP domain +========== + +.. cpp:class:: n::Array<T,d> + + .. cpp:function:: T& operator[]( unsigned j ) + const T& operator[]( unsigned j ) const diff --git a/tests/roots/test-ext-viewcode/spam/__init__.py b/tests/roots/test-ext-viewcode/spam/__init__.py new file mode 100644 index 0000000..6219042 --- /dev/null +++ b/tests/roots/test-ext-viewcode/spam/__init__.py @@ -0,0 +1,2 @@ +from .mod1 import Class1, func1 +from .mod2 import Class2, func2 diff --git a/tests/roots/test-ext-viewcode/spam/mod1.py b/tests/roots/test-ext-viewcode/spam/mod1.py new file mode 100644 index 0000000..a078328 --- /dev/null +++ b/tests/roots/test-ext-viewcode/spam/mod1.py @@ -0,0 +1,30 @@ +""" +mod1 +""" + + +def decorator(f): + return f + + +@decorator +def func1(a, b): + """ + this is func1 + """ + return a, b + + +@decorator +class Class1: + """ + this is Class1 + """ + + +class Class3: + """ + this is Class3 + """ + class_attr = 42 + """this is the class attribute class_attr""" diff --git a/tests/roots/test-ext-viewcode/spam/mod2.py b/tests/roots/test-ext-viewcode/spam/mod2.py new file mode 100644 index 0000000..72cb089 --- /dev/null +++ b/tests/roots/test-ext-viewcode/spam/mod2.py @@ -0,0 +1,22 @@ +""" +mod2 +""" + + +def decorator(f): + return f + + +@decorator +def func2(a, b): + """ + this is func2 + """ + return a, b + + +@decorator +class Class2: + """ + this is Class2 + """ diff --git a/tests/roots/test-ext-viewcode/spam/mod3.py b/tests/roots/test-ext-viewcode/spam/mod3.py new file mode 100644 index 0000000..812c9b5 --- /dev/null +++ b/tests/roots/test-ext-viewcode/spam/mod3.py @@ -0,0 +1,3 @@ +from spam.mod1 import Class3 + +__all__ = ('Class3',) diff --git a/tests/roots/test-extensions/conf.py b/tests/roots/test-extensions/conf.py new file mode 100644 index 0000000..9a3cbc8 --- /dev/null +++ b/tests/roots/test-extensions/conf.py @@ -0,0 +1,4 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) diff --git a/tests/roots/test-extensions/read_parallel.py b/tests/roots/test-extensions/read_parallel.py new file mode 100644 index 0000000..a3e052f --- /dev/null +++ b/tests/roots/test-extensions/read_parallel.py @@ -0,0 +1,4 @@ +def setup(app): + return { + 'parallel_read_safe': True + } diff --git a/tests/roots/test-extensions/read_serial.py b/tests/roots/test-extensions/read_serial.py new file mode 100644 index 0000000..c55570a --- /dev/null +++ b/tests/roots/test-extensions/read_serial.py @@ -0,0 +1,4 @@ +def setup(app): + return { + 'parallel_read_safe': False + } diff --git a/tests/roots/test-extensions/write_parallel.py b/tests/roots/test-extensions/write_parallel.py new file mode 100644 index 0000000..ebc48ef --- /dev/null +++ b/tests/roots/test-extensions/write_parallel.py @@ -0,0 +1,4 @@ +def setup(app): + return { + 'parallel_write_safe': True, + } diff --git a/tests/roots/test-extensions/write_serial.py b/tests/roots/test-extensions/write_serial.py new file mode 100644 index 0000000..75494ce --- /dev/null +++ b/tests/roots/test-extensions/write_serial.py @@ -0,0 +1,4 @@ +def setup(app): + return { + 'parallel_write_safe': False + } diff --git a/tests/roots/test-footnotes/bar.rst b/tests/roots/test-footnotes/bar.rst new file mode 100644 index 0000000..660c663 --- /dev/null +++ b/tests/roots/test-footnotes/bar.rst @@ -0,0 +1,6 @@ +bar +=== + +Same footnote number [1]_ in bar.rst + +.. 
[1] footnote in bar diff --git a/tests/roots/test-footnotes/baz.rst b/tests/roots/test-footnotes/baz.rst new file mode 100644 index 0000000..af496c5 --- /dev/null +++ b/tests/roots/test-footnotes/baz.rst @@ -0,0 +1,6 @@ +baz +=== + +Auto footnote number [#]_ in baz.rst + +.. [#] footnote in baz diff --git a/tests/roots/test-footnotes/conf.py b/tests/roots/test-footnotes/conf.py new file mode 100644 index 0000000..a45d22e --- /dev/null +++ b/tests/roots/test-footnotes/conf.py @@ -0,0 +1 @@ +exclude_patterns = ['_build'] diff --git a/tests/roots/test-footnotes/index.rst b/tests/roots/test-footnotes/index.rst new file mode 100644 index 0000000..f2c5d0e --- /dev/null +++ b/tests/roots/test-footnotes/index.rst @@ -0,0 +1,188 @@ +=============== +test-footenotes +=============== + +.. toctree:: + + bar + baz + +.. contents:: + :local: + +The section with a reference to [AuthorYear]_ +============================================= + +.. figure:: rimg.png + + This is the figure caption with a reference to [AuthorYear]_. + +.. list-table:: The table title with a reference to [AuthorYear]_ + :header-rows: 1 + + * - Header1 + - Header2 + * - Content + - Content + +.. rubric:: The rubric title with a reference to [AuthorYear]_ + +.. [#] First + +* First footnote: [#]_ +* Second footnote: [1]_ +* `Sphinx <http://sphinx-doc.org/>`_ +* Third footnote: [#]_ +* Fourth footnote: [#named]_ +* `URL including tilde <http://sphinx-doc.org/~test/>`_ +* GitHub Page: `https://github.com/sphinx-doc/sphinx <https://github.com/sphinx-doc/sphinx>`_ +* Mailing list: `sphinx-dev@googlegroups.com <mailto:sphinx-dev@googlegroups.com>`_ + +.. [AuthorYear] Author, Title, Year +.. [1] Second +.. [#] Third [#]_ +.. [#] Footnote inside footnote +.. [#named] Fourth + +The section with a reference to [#]_ +===================================== + +.. [#] Footnote in section + +`URL in term <http://sphinx-doc.org/>`_ + Description Description Description ... + +Footnote in term [#]_ + Description Description Description ... + + `Term in deflist <http://sphinx-doc.org/>`_ + Description2 + +.. [#] Footnote in term + +.. figure:: rimg.png + + This is the figure caption with a footnote to [#]_. + +.. [#] Footnote in caption + +.. list-table:: footnote [#]_ in caption of normal table + :widths: 1 1 + :header-rows: 1 + + * - name + - desc + * - a + - b + * - a + - b + +.. [#] Foot note in table + +.. list-table:: footnote [#]_ in caption [#]_ of longtable + :widths: 1 1 + :header-rows: 1 + + * - name + - desc + * - This is a reference to the code-block in the footnote: + :ref:`codeblockinfootnote` + - This is one more footnote with some code in it [#]_. + * - This is a reference to the other code block: + :ref:`codeblockinanotherfootnote` + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + * - a + - b + +.. [#] Foot note in longtable + +.. [#] Second footnote in caption of longtable + + .. code-block:: python + :caption: I am in a footnote + :name: codeblockinfootnote + + def foo(x,y): + return x+y + +.. [#] Third footnote in longtable + + .. 
code-block:: python + :caption: I am also in a footnote + :name: codeblockinanotherfootnote + + def bar(x,y): + return x+y + +The section with an object description +====================================== + +.. py:function:: dummy(N) + :no-index: + +Footnotes referred twice +======================== + +* Explicitly numbered footnote: [100]_ [100]_ +* Named footnote: [#twice]_ [#twice]_ + +.. [100] Numbered footnote +.. [#twice] Named footnote diff --git a/tests/roots/test-footnotes/rimg.png b/tests/roots/test-footnotes/rimg.png Binary files differnew file mode 100644 index 0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-footnotes/rimg.png diff --git a/tests/roots/test-gettext-template/_templates/template1.html b/tests/roots/test-gettext-template/_templates/template1.html new file mode 100644 index 0000000..f4b49f1 --- /dev/null +++ b/tests/roots/test-gettext-template/_templates/template1.html @@ -0,0 +1,5 @@ +{% extends "layout.html" %} +{% block body %} + <h1>{{ _('Template 1') }}</h1> + <p>{%trans%}This is Template 1.{%endtrans%}</p> +{% endblock %} diff --git a/tests/roots/test-gettext-template/_templates/template2.html b/tests/roots/test-gettext-template/_templates/template2.html new file mode 100644 index 0000000..2a21069 --- /dev/null +++ b/tests/roots/test-gettext-template/_templates/template2.html @@ -0,0 +1,5 @@ +{% extends "layout.html" %} +{% block body %} + <h1>{{ _('Template 2') }}</h1> + <p>{%trans%}This is Template 2.{%endtrans%}</p> +{% endblock %} diff --git a/tests/roots/test-gettext-template/conf.py b/tests/roots/test-gettext-template/conf.py new file mode 100644 index 0000000..3f793b7 --- /dev/null +++ b/tests/roots/test-gettext-template/conf.py @@ -0,0 +1 @@ +templates_path = ['_templates'] diff --git a/tests/roots/test-gettext-template/index.rst b/tests/roots/test-gettext-template/index.rst new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-gettext-template/index.rst diff --git a/tests/roots/test-glossary/conf.py b/tests/roots/test-glossary/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-glossary/conf.py diff --git a/tests/roots/test-glossary/index.rst b/tests/roots/test-glossary/index.rst new file mode 100644 index 0000000..1d84a02 --- /dev/null +++ b/tests/roots/test-glossary/index.rst @@ -0,0 +1,22 @@ +test-glossary +============= + +.. glossary:: + :sorted: + + boson + Particle with integer spin. + + *fermion* + Particle with half-integer spin. + + tauon + myon + electron + Examples for fermions. + + über + Gewisse + + ähnlich + Dinge diff --git a/tests/roots/test-highlight_options/conf.py b/tests/roots/test-highlight_options/conf.py new file mode 100644 index 0000000..90997d4 --- /dev/null +++ b/tests/roots/test-highlight_options/conf.py @@ -0,0 +1,4 @@ +highlight_options = { + 'default': {'default_option': True}, + 'python': {'python_option': True} +} diff --git a/tests/roots/test-highlight_options/index.rst b/tests/roots/test-highlight_options/index.rst new file mode 100644 index 0000000..389041a --- /dev/null +++ b/tests/roots/test-highlight_options/index.rst @@ -0,0 +1,14 @@ +test-highlight_options +====================== + +.. code-block:: + + blah blah blah + +.. code-block:: python + + blah blah blah + +.. 
code-block:: java + + blah blah blah diff --git a/tests/roots/test-html_assets/conf.py b/tests/roots/test-html_assets/conf.py new file mode 100644 index 0000000..7f94bbb --- /dev/null +++ b/tests/roots/test-html_assets/conf.py @@ -0,0 +1,12 @@ +project = 'Sphinx' +version = '1.4.4' + +html_static_path = ['static', 'subdir'] +html_extra_path = ['extra', 'subdir'] +html_css_files = ['css/style.css', + ('https://example.com/custom.css', + {'title': 'title', 'media': 'print', 'priority': 400})] +html_js_files = ['js/custom.js', + ('https://example.com/script.js', + {'async': 'async', 'priority': 400})] +exclude_patterns = ['**/_build', '**/.htpasswd'] diff --git a/tests/roots/test-html_assets/extra/.htaccess b/tests/roots/test-html_assets/extra/.htaccess new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/extra/.htaccess diff --git a/tests/roots/test-html_assets/extra/.htpasswd b/tests/roots/test-html_assets/extra/.htpasswd new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/extra/.htpasswd diff --git a/tests/roots/test-html_assets/extra/API.html_t b/tests/roots/test-html_assets/extra/API.html_t new file mode 100644 index 0000000..34ecd9d --- /dev/null +++ b/tests/roots/test-html_assets/extra/API.html_t @@ -0,0 +1 @@ +{{ project }}-{{ version }} diff --git a/tests/roots/test-html_assets/extra/css/style.css b/tests/roots/test-html_assets/extra/css/style.css new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/extra/css/style.css diff --git a/tests/roots/test-html_assets/extra/index.rst b/tests/roots/test-html_assets/extra/index.rst new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/extra/index.rst diff --git a/tests/roots/test-html_assets/extra/rimg.png b/tests/roots/test-html_assets/extra/rimg.png Binary files differnew file mode 100644 index 0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-html_assets/extra/rimg.png diff --git a/tests/roots/test-html_assets/extra/subdir/.htaccess b/tests/roots/test-html_assets/extra/subdir/.htaccess new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/extra/subdir/.htaccess diff --git a/tests/roots/test-html_assets/extra/subdir/.htpasswd b/tests/roots/test-html_assets/extra/subdir/.htpasswd new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/extra/subdir/.htpasswd diff --git a/tests/roots/test-html_assets/index.rst b/tests/roots/test-html_assets/index.rst new file mode 100644 index 0000000..6d56194 --- /dev/null +++ b/tests/roots/test-html_assets/index.rst @@ -0,0 +1,3 @@ +test-html_extra_path +===================== +this is dummy content diff --git a/tests/roots/test-html_assets/static/.htaccess b/tests/roots/test-html_assets/static/.htaccess new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/static/.htaccess diff --git a/tests/roots/test-html_assets/static/.htpasswd b/tests/roots/test-html_assets/static/.htpasswd new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/static/.htpasswd diff --git a/tests/roots/test-html_assets/static/API.html_t b/tests/roots/test-html_assets/static/API.html_t new file mode 100644 index 0000000..34ecd9d --- /dev/null +++ b/tests/roots/test-html_assets/static/API.html_t @@ -0,0 +1 @@ +{{ project }}-{{ version }} diff --git a/tests/roots/test-html_assets/static/css/style.css 
b/tests/roots/test-html_assets/static/css/style.css new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/static/css/style.css diff --git a/tests/roots/test-html_assets/static/index.rst b/tests/roots/test-html_assets/static/index.rst new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/static/index.rst diff --git a/tests/roots/test-html_assets/static/js/custom.js b/tests/roots/test-html_assets/static/js/custom.js new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/static/js/custom.js diff --git a/tests/roots/test-html_assets/static/rimg.png b/tests/roots/test-html_assets/static/rimg.png Binary files differnew file mode 100644 index 0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-html_assets/static/rimg.png diff --git a/tests/roots/test-html_assets/static/subdir/.htaccess b/tests/roots/test-html_assets/static/subdir/.htaccess new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/static/subdir/.htaccess diff --git a/tests/roots/test-html_assets/static/subdir/.htpasswd b/tests/roots/test-html_assets/static/subdir/.htpasswd new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/static/subdir/.htpasswd diff --git a/tests/roots/test-html_assets/subdir/_build/index.html b/tests/roots/test-html_assets/subdir/_build/index.html new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_assets/subdir/_build/index.html diff --git a/tests/roots/test-html_assets/subdir/background.png b/tests/roots/test-html_assets/subdir/background.png Binary files differnew file mode 100644 index 0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-html_assets/subdir/background.png diff --git a/tests/roots/test-html_entity/conf.py b/tests/roots/test-html_entity/conf.py new file mode 100644 index 0000000..46bb290 --- /dev/null +++ b/tests/roots/test-html_entity/conf.py @@ -0,0 +1,2 @@ +html_theme = 'classic' +exclude_patterns = ['_build'] diff --git a/tests/roots/test-html_entity/index.rst b/tests/roots/test-html_entity/index.rst new file mode 100644 index 0000000..11f4f4c --- /dev/null +++ b/tests/roots/test-html_entity/index.rst @@ -0,0 +1,31 @@ +.. _index: + +test-html_entity (#3450) +========================= + +Empty cell +---------- + +.. list-table:: + + - * un + * + * trois + +Return description in function signature +---------------------------------------- + +.. 
py:function:: test() -> string + + rarr + +Field list that has long name (over 14 characters) +-------------------------------------------------- + +:abcdefghijklmnopqrstuvwxyz: fieldlist + +Option list that has long name (over 14 characters) +--------------------------------------------------- + +-a all +-b long_long_file use file diff --git a/tests/roots/test-html_file_checksum/conf.py b/tests/roots/test-html_file_checksum/conf.py new file mode 100644 index 0000000..f918814 --- /dev/null +++ b/tests/roots/test-html_file_checksum/conf.py @@ -0,0 +1 @@ +html_static_path = ['static'] diff --git a/tests/roots/test-html_file_checksum/index.rst b/tests/roots/test-html_file_checksum/index.rst new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_file_checksum/index.rst diff --git a/tests/roots/test-html_file_checksum/static/empty.js b/tests/roots/test-html_file_checksum/static/empty.js new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_file_checksum/static/empty.js diff --git a/tests/roots/test-html_file_checksum/static/script.js b/tests/roots/test-html_file_checksum/static/script.js new file mode 100644 index 0000000..9a21456 --- /dev/null +++ b/tests/roots/test-html_file_checksum/static/script.js @@ -0,0 +1 @@ +/* Script */ diff --git a/tests/roots/test-html_file_checksum/static/stylesheet-a.css b/tests/roots/test-html_file_checksum/static/stylesheet-a.css new file mode 100644 index 0000000..3a5d802 --- /dev/null +++ b/tests/roots/test-html_file_checksum/static/stylesheet-a.css @@ -0,0 +1 @@ +/* Stylesheet A */ diff --git a/tests/roots/test-html_file_checksum/static/stylesheet-b.css b/tests/roots/test-html_file_checksum/static/stylesheet-b.css new file mode 100644 index 0000000..ad5ff28 --- /dev/null +++ b/tests/roots/test-html_file_checksum/static/stylesheet-b.css @@ -0,0 +1 @@ +/* Stylesheet B */ diff --git a/tests/roots/test-html_scaled_image_link/conf.py b/tests/roots/test-html_scaled_image_link/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_scaled_image_link/conf.py diff --git a/tests/roots/test-html_scaled_image_link/img.png b/tests/roots/test-html_scaled_image_link/img.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-html_scaled_image_link/img.png diff --git a/tests/roots/test-html_scaled_image_link/index.rst b/tests/roots/test-html_scaled_image_link/index.rst new file mode 100644 index 0000000..0e47940 --- /dev/null +++ b/tests/roots/test-html_scaled_image_link/index.rst @@ -0,0 +1,11 @@ +test-html_scaled_image_link +=========================== + +.. image:: img.png + +.. image:: img.png + :scale: 50% + +.. image:: img.png + :scale: 50% + :class: no-scaled-link diff --git a/tests/roots/test-html_signaturereturn_icon/conf.py b/tests/roots/test-html_signaturereturn_icon/conf.py new file mode 100644 index 0000000..a695d18 --- /dev/null +++ b/tests/roots/test-html_signaturereturn_icon/conf.py @@ -0,0 +1 @@ +extensions = ['sphinx.ext.autodoc'] diff --git a/tests/roots/test-html_signaturereturn_icon/index.rst b/tests/roots/test-html_signaturereturn_icon/index.rst new file mode 100644 index 0000000..4ff4eb6 --- /dev/null +++ b/tests/roots/test-html_signaturereturn_icon/index.rst @@ -0,0 +1,4 @@ +test-html_signaturereturn_icon +============================== + +.. 
py:function:: foo(a: bool, b: int) -> str diff --git a/tests/roots/test-html_style/_static/default.css b/tests/roots/test-html_style/_static/default.css new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-html_style/_static/default.css diff --git a/tests/roots/test-html_style/conf.py b/tests/roots/test-html_style/conf.py new file mode 100644 index 0000000..df1b310 --- /dev/null +++ b/tests/roots/test-html_style/conf.py @@ -0,0 +1,2 @@ +html_style = 'default.css' +html_static_path = ['_static'] diff --git a/tests/roots/test-html_style/index.rst b/tests/roots/test-html_style/index.rst new file mode 100644 index 0000000..d8aef48 --- /dev/null +++ b/tests/roots/test-html_style/index.rst @@ -0,0 +1,2 @@ +html_style +========== diff --git a/tests/roots/test-image-escape/conf.py b/tests/roots/test-image-escape/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-image-escape/conf.py diff --git a/tests/roots/test-image-escape/img_#1.png b/tests/roots/test-image-escape/img_#1.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-image-escape/img_#1.png diff --git a/tests/roots/test-image-escape/index.rst b/tests/roots/test-image-escape/index.rst new file mode 100644 index 0000000..723bf43 --- /dev/null +++ b/tests/roots/test-image-escape/index.rst @@ -0,0 +1,5 @@ +Sphinx image handling +===================== + +.. an image with a character that is valid in a local file path but not a URL +.. image:: img_#1.png diff --git a/tests/roots/test-image-in-parsed-literal/conf.py b/tests/roots/test-image-in-parsed-literal/conf.py new file mode 100644 index 0000000..5d06da6 --- /dev/null +++ b/tests/roots/test-image-in-parsed-literal/conf.py @@ -0,0 +1,9 @@ +exclude_patterns = ['_build'] + +rst_epilog = ''' +.. |picture| image:: pic.png + :height: 1cm + :scale: 200% + :align: middle + :alt: alternative_text +''' diff --git a/tests/roots/test-image-in-parsed-literal/index.rst b/tests/roots/test-image-in-parsed-literal/index.rst new file mode 100644 index 0000000..80e1008 --- /dev/null +++ b/tests/roots/test-image-in-parsed-literal/index.rst @@ -0,0 +1,9 @@ +test-image-in-parsed-literal +============================ + +Dummy text + +.. parsed-literal:: + + |picture|\ AFTER + diff --git a/tests/roots/test-image-in-parsed-literal/pic.png b/tests/roots/test-image-in-parsed-literal/pic.png Binary files differnew file mode 100644 index 0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-image-in-parsed-literal/pic.png diff --git a/tests/roots/test-image-in-section/conf.py b/tests/roots/test-image-in-section/conf.py new file mode 100644 index 0000000..9cb250c --- /dev/null +++ b/tests/roots/test-image-in-section/conf.py @@ -0,0 +1,8 @@ +exclude_patterns = ['_build'] + +rst_epilog = ''' +.. 
|picture| image:: pic.png + :width: 15pt + :height: 15pt + :alt: alternative_text +''' diff --git a/tests/roots/test-image-in-section/index.rst b/tests/roots/test-image-in-section/index.rst new file mode 100644 index 0000000..08416d6 --- /dev/null +++ b/tests/roots/test-image-in-section/index.rst @@ -0,0 +1,22 @@ +test-image-in-section +===================== +this is dummy content + + +|picture| Test section +---------------------- +blah blah blah + + +Another section +--------------- +another blah + + +Other [blah] |picture| section +------------------------------ +other blah + +|picture| +--------- +blah blah blah diff --git a/tests/roots/test-image-in-section/pic.png b/tests/roots/test-image-in-section/pic.png Binary files differnew file mode 100644 index 0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-image-in-section/pic.png diff --git a/tests/roots/test-images/conf.py b/tests/roots/test-images/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-images/conf.py diff --git a/tests/roots/test-images/img.gif b/tests/roots/test-images/img.gif Binary files differnew file mode 100644 index 0000000..8f02686 --- /dev/null +++ b/tests/roots/test-images/img.gif diff --git a/tests/roots/test-images/img.ja.png b/tests/roots/test-images/img.ja.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-images/img.ja.png diff --git a/tests/roots/test-images/img.pdf b/tests/roots/test-images/img.pdf Binary files differnew file mode 100644 index 0000000..cacbd85 --- /dev/null +++ b/tests/roots/test-images/img.pdf diff --git a/tests/roots/test-images/img.png b/tests/roots/test-images/img.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-images/img.png diff --git a/tests/roots/test-images/img.zh.png b/tests/roots/test-images/img.zh.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-images/img.zh.png diff --git a/tests/roots/test-images/index.rst b/tests/roots/test-images/index.rst new file mode 100644 index 0000000..14a2987 --- /dev/null +++ b/tests/roots/test-images/index.rst @@ -0,0 +1,29 @@ +test-image +========== + +.. image:: rimg.png + +.. figure:: rimg.png + + The caption of pic + +.. image:: img.* + +.. figure:: img.* + + The caption of img + +.. image:: testimäge.png + +.. image:: rimg.png + :target: https://www.sphinx-doc.org/ + +.. image:: rimg.png + :align: center + :target: https://www.python.org/ + +.. a remote image +.. image:: https://www.python.org/static/img/python-logo.png + +.. non-exist remote image +.. 
image:: https://www.google.com/NOT_EXIST.PNG diff --git a/tests/roots/test-images/rimg.png b/tests/roots/test-images/rimg.png Binary files differnew file mode 100644 index 0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-images/rimg.png diff --git a/tests/roots/test-images/rimg.png.xx b/tests/roots/test-images/rimg.png.xx Binary files differnew file mode 100644 index 0000000..1081dc1 --- /dev/null +++ b/tests/roots/test-images/rimg.png.xx diff --git a/tests/roots/test-images/rimg.xx.png b/tests/roots/test-images/rimg.xx.png Binary files differnew file mode 100644 index 0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-images/rimg.xx.png diff --git a/tests/roots/test-images/subdir/index.rst b/tests/roots/test-images/subdir/index.rst new file mode 100644 index 0000000..72e742c --- /dev/null +++ b/tests/roots/test-images/subdir/index.rst @@ -0,0 +1,10 @@ +test-images/subdir +================== + +.. image:: rimg.png + +.. image:: svgimg.* + +.. figure:: svgimg.* + + The caption of svgimg diff --git a/tests/roots/test-images/subdir/rimg.png b/tests/roots/test-images/subdir/rimg.png Binary files differnew file mode 100644 index 0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-images/subdir/rimg.png diff --git a/tests/roots/test-images/subdir/rimg.xx.png b/tests/roots/test-images/subdir/rimg.xx.png Binary files differnew file mode 100644 index 0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-images/subdir/rimg.xx.png diff --git a/tests/roots/test-images/subdir/svgimg.pdf b/tests/roots/test-images/subdir/svgimg.pdf Binary files differnew file mode 100644 index 0000000..cacbd85 --- /dev/null +++ b/tests/roots/test-images/subdir/svgimg.pdf diff --git a/tests/roots/test-images/subdir/svgimg.svg b/tests/roots/test-images/subdir/svgimg.svg new file mode 100644 index 0000000..2bae0b9 --- /dev/null +++ b/tests/roots/test-images/subdir/svgimg.svg @@ -0,0 +1,4 @@ +<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="60" width="60"> + <circle cx="40" cy="40" r="24" style="stroke:#000000; fill:#ffffff"/> +</svg> diff --git a/tests/roots/test-images/subdir/svgimg.xx.svg b/tests/roots/test-images/subdir/svgimg.xx.svg new file mode 100644 index 0000000..2bae0b9 --- /dev/null +++ b/tests/roots/test-images/subdir/svgimg.xx.svg @@ -0,0 +1,4 @@ +<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="60" width="60"> + <circle cx="40" cy="40" r="24" style="stroke:#000000; fill:#ffffff"/> +</svg> diff --git a/tests/roots/test-images/testimäge.png b/tests/roots/test-images/testimäge.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-images/testimäge.png diff --git a/tests/roots/test-index_on_title/conf.py b/tests/roots/test-index_on_title/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-index_on_title/conf.py diff --git a/tests/roots/test-index_on_title/contents.rst b/tests/roots/test-index_on_title/contents.rst new file mode 100644 index 0000000..8256c42 --- /dev/null +++ b/tests/roots/test-index_on_title/contents.rst @@ -0,0 +1,5 @@ +index_on_title +============== + +Test for :index:`index` in top level title +------------------------------------------ diff --git a/tests/roots/test-inheritance/basic_diagram.rst b/tests/roots/test-inheritance/basic_diagram.rst new file mode 100644 index 0000000..4c3838e --- /dev/null +++ 
b/tests/roots/test-inheritance/basic_diagram.rst @@ -0,0 +1,5 @@ +Basic Diagram +============== + +.. inheritance-diagram:: + dummy.test diff --git a/tests/roots/test-inheritance/conf.py b/tests/roots/test-inheritance/conf.py new file mode 100644 index 0000000..26cadca --- /dev/null +++ b/tests/roots/test-inheritance/conf.py @@ -0,0 +1,7 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + +extensions = ['sphinx.ext.inheritance_diagram'] +source_suffix = '.rst' diff --git a/tests/roots/test-inheritance/diagram_module_w_2_top_classes.rst b/tests/roots/test-inheritance/diagram_module_w_2_top_classes.rst new file mode 100644 index 0000000..cc4365e --- /dev/null +++ b/tests/roots/test-inheritance/diagram_module_w_2_top_classes.rst @@ -0,0 +1,6 @@ +Diagram using module with 2 top classes +======================================= + +.. inheritance-diagram:: + dummy.test + :top-classes: dummy.test.B, dummy.test.C diff --git a/tests/roots/test-inheritance/diagram_w_1_top_class.rst b/tests/roots/test-inheritance/diagram_w_1_top_class.rst new file mode 100644 index 0000000..97da825 --- /dev/null +++ b/tests/roots/test-inheritance/diagram_w_1_top_class.rst @@ -0,0 +1,7 @@ +Diagram using 1 top class +========================= + +.. inheritance-diagram:: + dummy.test + :top-classes: dummy.test.B + diff --git a/tests/roots/test-inheritance/diagram_w_2_top_classes.rst b/tests/roots/test-inheritance/diagram_w_2_top_classes.rst new file mode 100644 index 0000000..8a6ae58 --- /dev/null +++ b/tests/roots/test-inheritance/diagram_w_2_top_classes.rst @@ -0,0 +1,9 @@ +Diagram using 2 top classes +=========================== + +.. inheritance-diagram:: + dummy.test.F + dummy.test.D + dummy.test.E + :top-classes: dummy.test.B, dummy.test.C + diff --git a/tests/roots/test-inheritance/diagram_w_nested_classes.rst b/tests/roots/test-inheritance/diagram_w_nested_classes.rst new file mode 100644 index 0000000..7fa0217 --- /dev/null +++ b/tests/roots/test-inheritance/diagram_w_nested_classes.rst @@ -0,0 +1,5 @@ +Diagram with Nested Classes +=========================== + +.. inheritance-diagram:: + dummy.test_nested diff --git a/tests/roots/test-inheritance/diagram_w_parts.rst b/tests/roots/test-inheritance/diagram_w_parts.rst new file mode 100644 index 0000000..65a8318 --- /dev/null +++ b/tests/roots/test-inheritance/diagram_w_parts.rst @@ -0,0 +1,7 @@ +Diagram using the parts option +============================== + +.. inheritance-diagram:: + dummy.test + :parts: 1 + diff --git a/tests/roots/test-inheritance/dummy/__init__.py b/tests/roots/test-inheritance/dummy/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-inheritance/dummy/__init__.py diff --git a/tests/roots/test-inheritance/dummy/test.py b/tests/roots/test-inheritance/dummy/test.py new file mode 100644 index 0000000..12fe8d9 --- /dev/null +++ b/tests/roots/test-inheritance/dummy/test.py @@ -0,0 +1,35 @@ +r""" + + Test with a class diagram like this:: + + A + / \ + B C + / \ / \ + E D F + +""" + + +class A: + pass + + +class B(A): + pass + + +class C(A): + pass + + +class D(B, C): + pass + + +class E(B): + pass + + +class F(C): + pass diff --git a/tests/roots/test-inheritance/dummy/test_nested.py b/tests/roots/test-inheritance/dummy/test_nested.py new file mode 100644 index 0000000..4b68018 --- /dev/null +++ b/tests/roots/test-inheritance/dummy/test_nested.py @@ -0,0 +1,11 @@ +"""Test with nested classes. 
+""" + + +class A: + class B: + pass + + +class C(A.B): + pass diff --git a/tests/roots/test-inheritance/index.rst b/tests/roots/test-inheritance/index.rst new file mode 100644 index 0000000..db4fbac --- /dev/null +++ b/tests/roots/test-inheritance/index.rst @@ -0,0 +1,4 @@ +.. toctree:: + :glob: + + * diff --git a/tests/roots/test-intl/_templates/contents.html b/tests/roots/test-intl/_templates/contents.html new file mode 100644 index 0000000..d730545 --- /dev/null +++ b/tests/roots/test-intl/_templates/contents.html @@ -0,0 +1,10 @@ +{% extends "layout.html" %} +{% block body %} + <h1>{{ _('Welcome') }}</h1> + <p>{%trans%}Sphinx {{ version }}{%endtrans%}</p> +{% endblock %} + +{% block comment %} +utf-8 encoded string: ニシキヘビ +{% endblock %} + diff --git a/tests/roots/test-intl/admonitions.txt b/tests/roots/test-intl/admonitions.txt new file mode 100644 index 0000000..a539461 --- /dev/null +++ b/tests/roots/test-intl/admonitions.txt @@ -0,0 +1,50 @@ +:tocdepth: 2 + +Admonitions +================== +.. #1206 gettext did not translate admonition directive's title + +.. attention:: attention title + + attention body + +.. caution:: caution title + + caution body + +.. danger:: danger title + + danger body + +.. error:: error title + + error body + +.. hint:: hint title + + hint body + +.. important:: important title + + important body + +.. note:: note title + + note body + +.. tip:: tip title + + tip body + +.. warning:: warning title + + warning body + +.. admonition:: admonition title + + admonition body + +.. admonition:: 1. admonition title + + admonition body + diff --git a/tests/roots/test-intl/bom.txt b/tests/roots/test-intl/bom.txt new file mode 100644 index 0000000..3fea824 --- /dev/null +++ b/tests/roots/test-intl/bom.txt @@ -0,0 +1,5 @@ +File with UTF-8 BOM +=================== + +This file has a UTF-8 "BOM". + diff --git a/tests/roots/test-intl/conf.py b/tests/roots/test-intl/conf.py new file mode 100644 index 0000000..96ac664 --- /dev/null +++ b/tests/roots/test-intl/conf.py @@ -0,0 +1,8 @@ +project = 'Sphinx intl <Tests>' +source_suffix = '.txt' +keep_warnings = True +templates_path = ['_templates'] +html_additional_pages = {'contents': 'contents.html'} +release = version = '2013.120' +gettext_additional_targets = ['index'] +exclude_patterns = ['_build'] diff --git a/tests/roots/test-intl/definition_terms.txt b/tests/roots/test-intl/definition_terms.txt new file mode 100644 index 0000000..4c56288 --- /dev/null +++ b/tests/roots/test-intl/definition_terms.txt @@ -0,0 +1,16 @@ +:tocdepth: 2 + +i18n with definition terms +========================== + +Some term + The corresponding definition + +Some *term* `with link <http://sphinx-doc.org/>`__ + The corresponding definition #2 + +Some **term** with : classifier1 : classifier2 + The corresponding definition + +Some term with : classifier[] + The corresponding definition diff --git a/tests/roots/test-intl/docfields.txt b/tests/roots/test-intl/docfields.txt new file mode 100644 index 0000000..b1b3c89 --- /dev/null +++ b/tests/roots/test-intl/docfields.txt @@ -0,0 +1,46 @@ +:tocdepth: 2 + +i18n with docfields +=================== + +.. single TypedField + +.. class:: Cls1 + :no-index: + + :param param: description of parameter param + +.. grouped TypedFields + +.. class:: Cls2 + :no-index: + + :param foo: description of parameter foo + :param bar: description of parameter bar + + +.. single GroupedField + +.. class:: Cls3(values) + :no-index: + + :raises ValueError: if the values are out of range + +.. grouped GroupedFields + +.. 
class:: Cls4(values) + :no-index: + + :raises TypeError: if the values are not valid + :raises ValueError: if the values are out of range + + +.. single Field + +.. class:: Cls5 + :no-index: + + :returns: a new :class:`Cls3` instance + +.. Field is never grouped + diff --git a/tests/roots/test-intl/external_links.txt b/tests/roots/test-intl/external_links.txt new file mode 100644 index 0000000..1cecbee --- /dev/null +++ b/tests/roots/test-intl/external_links.txt @@ -0,0 +1,35 @@ +:tocdepth: 2 + +i18n with external links +======================== +.. #1044 external-links-dont-work-in-localized-html + +External link to Python_. + +Internal link to `i18n with external links`_. + +Inline link by `Sphinx Site <http://sphinx-doc.org>`_. + +Unnamed link__. + +.. _Python: http://python.org/index.html +.. __: http://google.com + + +link target swapped translation +================================ + +link to external1_ and external2_. + +link to `Sphinx Site <http://sphinx-doc.org>`_ and `Python Site <http://python.org>`_. + +.. _external1: https://www.google.com/external1 +.. _external2: https://www.google.com/external2 + + +Multiple references in the same line +===================================== + +Link to `Sphinx Site <http://sphinx-doc.org>`_, `Python Site <http://python.org>`_, Python_, Unnamed__ and `i18n with external links`_. + +.. __: http://google.com diff --git a/tests/roots/test-intl/figure.txt b/tests/roots/test-intl/figure.txt new file mode 100644 index 0000000..633e12e --- /dev/null +++ b/tests/roots/test-intl/figure.txt @@ -0,0 +1,53 @@ +:tocdepth: 2 + +i18n with figure caption +======================== + +.. figure:: i18n.png + + My caption of the figure + + My description paragraph1 of the figure. + + My description paragraph2 of the figure. + +figure in the block +--------------------- + +block + + .. figure:: i18n.png + + My caption of the figure + + My description paragraph1 of the figure. + + My description paragraph2 of the figure. + + +image url and alt +------------------- + +.. image:: i18n.png + :alt: i18n + +.. figure:: img.png + :alt: img + + +image on substitution +--------------------- + +.. |sub image| image:: i18n.png + +image under note +----------------- + +.. note:: + + .. image:: i18n.png + :alt: i18n under note + + .. figure:: img.png + :alt: img under note + diff --git a/tests/roots/test-intl/footnote.txt b/tests/roots/test-intl/footnote.txt new file mode 100644 index 0000000..0bbed91 --- /dev/null +++ b/tests/roots/test-intl/footnote.txt @@ -0,0 +1,14 @@ +:tocdepth: 2 + +i18n with Footnote +================== +.. #955 cant-build-html-with-footnotes-when-using + +[100]_ Contents [#]_ for `i18n with Footnote`_ [ref]_ [#named]_ [*]_. +second footnote_ref [100]_. + +.. [#] This is a auto numbered footnote. +.. [ref] This is a named footnote. +.. [100] This is a numbered footnote. +.. [#named] This is a auto numbered named footnote. +.. [*] This is a auto symbol footnote. diff --git a/tests/roots/test-intl/glossary_terms.txt b/tests/roots/test-intl/glossary_terms.txt new file mode 100644 index 0000000..473d857 --- /dev/null +++ b/tests/roots/test-intl/glossary_terms.txt @@ -0,0 +1,29 @@ +:tocdepth: 2 + +i18n with glossary terms +======================== + +.. glossary:: + + Some term + The corresponding glossary + + Some other term + The corresponding glossary #2 + +link to :term:`Some term`. + +Translated glossary should be sorted by translated terms: + +.. 
glossary:: + :sorted: + + AAA + Define AAA + + CCC + EEE + Define CCC + + BBB + Define BBB diff --git a/tests/roots/test-intl/glossary_terms_inconsistency.txt b/tests/roots/test-intl/glossary_terms_inconsistency.txt new file mode 100644 index 0000000..837411b --- /dev/null +++ b/tests/roots/test-intl/glossary_terms_inconsistency.txt @@ -0,0 +1,6 @@ +:tocdepth: 2 + +i18n with glossary terms inconsistency +====================================== + +1. link to :term:`Some term` and :term:`Some other term`. diff --git a/tests/roots/test-intl/i18n.png b/tests/roots/test-intl/i18n.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-intl/i18n.png diff --git a/tests/roots/test-intl/img.png b/tests/roots/test-intl/img.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-intl/img.png diff --git a/tests/roots/test-intl/index.txt b/tests/roots/test-intl/index.txt new file mode 100644 index 0000000..9de15d5 --- /dev/null +++ b/tests/roots/test-intl/index.txt @@ -0,0 +1,40 @@ +CONTENTS +======== + +.. meta:: + :description: testdata for i18n + :keywords: i18n, sphinx, markup + +.. toctree:: + :maxdepth: 2 + :numbered: + :caption: Table of Contents + + subdir/index + bom + warnings + footnote + external_links + refs_inconsistency + literalblock + seealso + definition_terms + figure + index_entries + role_xref + glossary_terms + glossary_terms_inconsistency + versionchange + docfields + raw + refs + section + translation_progress + topic + +.. toctree:: + :maxdepth: 2 + :caption: Hidden Toc + :hidden: + + only diff --git a/tests/roots/test-intl/index_entries.txt b/tests/roots/test-intl/index_entries.txt new file mode 100644 index 0000000..e9300d0 --- /dev/null +++ b/tests/roots/test-intl/index_entries.txt @@ -0,0 +1,24 @@ +:tocdepth: 2 + +i18n with index entries +======================= + +.. index:: + single: Mailing List + pair: Newsletter; Recipients List + +index target section +-------------------- + +this is :index:`Newsletter` target paragraph. + + +various index entries +--------------------- + +.. index:: + triple: First; Second; Third + see: Entry; Mailing List + seealso: See; Newsletter + +That's all. diff --git a/tests/roots/test-intl/label_target.txt b/tests/roots/test-intl/label_target.txt new file mode 100644 index 0000000..ac00084 --- /dev/null +++ b/tests/roots/test-intl/label_target.txt @@ -0,0 +1,67 @@ +:tocdepth: 2 + +.. _implicit-target: + +section and label +================== + +.. This section's label and section title are different. +.. This case, the section have 2 target id. + +:ref:`implicit-target` point to ``implicit-target`` and +`section and label`_ point to ``section-and-label``. + + +.. _explicit-target: + +explicit-target +================ + +.. This section's label equals to section title. +.. This case, a duplicated target id is generated by docutils. + +:ref:`explicit-target` point to ``explicit-target`` and +`explicit-target`_ point to duplicated id like ``id1``. + + +implicit section name +====================== + +.. This section have no label. +.. This case, the section have one id. + +`implicit section name`_ point to ``implicit-section-name``. + +duplicated sub section +------------------------ + +.. This section have no label, but name will be duplicated by next section. +.. This case, the section have one id. + +`duplicated sub section`_ is broken link. + +.. There is no way to link to this section's ``duplicated-sub-section``` by +.. 
using formal reStructuredText markup. + +duplicated sub section +------------------------ + +.. This section have no label, but the section was a duplicate name. +.. This case, a duplicated target id is generated by docutils. + +.. There is no way to link to this section's duplicated id like ``id2`` by +.. using formal reStructuredText markup. + + +.. _bridge label: `label bridged target section`_ +.. _bridge label2: `section and label`_ + +label bridged target section +============================= + +.. This section is targeted through label definition. + +`bridge label`_ is not translatable but linked to translated section title. + +`bridge label2`_ point to ``section and label`` and `bridge label`_ point to ``label bridged target section``. The second appeared `bridge label2`_ point to correct target. + diff --git a/tests/roots/test-intl/literalblock.txt b/tests/roots/test-intl/literalblock.txt new file mode 100644 index 0000000..583b5b6 --- /dev/null +++ b/tests/roots/test-intl/literalblock.txt @@ -0,0 +1,71 @@ +:tocdepth: 2 + +i18n with literal block +========================= + +Correct literal block:: + + this is + literal block + +Missing literal block:: + +That's all. + +.. literalinclude:: raw.txt + :caption: included raw.txt + +code blocks +============== + +.. highlight:: ruby + +:: + + def main + 'result' + end + +:: + + #include <stdlib.h> + int main(int argc, char** argv) + { + return 0; + } + +.. code-block:: c + :caption: example of C language + + #include <stdio.h> + int main(int argc, char** argv) + { + return 0; + } + + +* :: + + literal-block + in list + +.. highlight:: none + +:: + + test_code_for_noqa() + continued() + + +doctest blocks +============== + +.. highlight:: python + +>>> import sys # sys importing +>>> def main(): # define main function +... sys.stdout.write('hello') # call write method of stdout object +>>> +>>> if __name__ == '__main__': # if run this py file as python script +... main() # call main + diff --git a/tests/roots/test-intl/noqa.txt b/tests/roots/test-intl/noqa.txt new file mode 100644 index 0000000..004b301 --- /dev/null +++ b/tests/roots/test-intl/noqa.txt @@ -0,0 +1,16 @@ +First section +============= + +Some text with a reference, :ref:`next-section`. + +Another reference: :ref:`next-section`. + +This should allow to test escaping ``#noqa``. + +.. _next-section: + +Next section +============ + +Some text, again referring to the section: :ref:`next-section`. + diff --git a/tests/roots/test-intl/only.txt b/tests/roots/test-intl/only.txt new file mode 100644 index 0000000..2c8990e --- /dev/null +++ b/tests/roots/test-intl/only.txt @@ -0,0 +1,14 @@ +Only directive +-------------- + +.. only:: html + + In HTML. + +.. only:: latex + + In LaTeX. + +.. only:: html or latex + + In both. diff --git a/tests/roots/test-intl/raw.txt b/tests/roots/test-intl/raw.txt new file mode 100644 index 0000000..fe77f6c --- /dev/null +++ b/tests/roots/test-intl/raw.txt @@ -0,0 +1,8 @@ +=== +Raw +=== + +.. raw:: html + + <iframe src="http://sphinx-doc.org"></iframe> + diff --git a/tests/roots/test-intl/refs.txt b/tests/roots/test-intl/refs.txt new file mode 100644 index 0000000..4a094b2 --- /dev/null +++ b/tests/roots/test-intl/refs.txt @@ -0,0 +1,48 @@ +References +=========== + +Translation Tips +----------------- + +.. _download Sphinx: https://pypi.org/project/Sphinx/ +.. _Docutils site: https://docutils.sourceforge.io/ +.. _Sphinx site: http://sphinx-doc.org/ + + +A-1. Here's how you can `download Sphinx`_. + +A-2. Here's how you can `download Sphinx`_. 
+ +A-3. Here's how you can `download Sphinx`_. + +B-1. `Docutils site`_ and `Sphinx site`_. + +B-2. `Docutils site`_ and `Sphinx site`_. + +B-3. `Docutils site`_ and `Sphinx site`_. + +B-4. `Docutils site`_ and `Sphinx site`_. + +C-1. Link to `Translation Tips`_ section. + +C-2. Link to `Translation Tips`_ section. + +C-3. Link to `Translation Tips`_ section. + +C-4. Link to `Translation Tips`_ section. + +C-5. Link to `Translation Tips`_ section. + +D-1. Link to `Translation Tips`_ and `Next Section`_ section. + +D-2. Link to `Translation Tips`_ and `Next Section`_ section. + +D-3. Link to `Translation Tips`_ and `Next Section`_ section. + +D-4. Link to `Translation Tips`_ and `Next Section`_ section. + +D-5. Link to `Translation Tips`_ and `Next Section`_ section. + +Next Section +------------- +Last updated |today|. diff --git a/tests/roots/test-intl/refs_inconsistency.txt b/tests/roots/test-intl/refs_inconsistency.txt new file mode 100644 index 0000000..b16623a --- /dev/null +++ b/tests/roots/test-intl/refs_inconsistency.txt @@ -0,0 +1,13 @@ +:tocdepth: 2 + +i18n with refs inconsistency +============================= + +* [100]_ for [#]_ citation [ref2]_. +* for reference_. +* normal text. + +.. [#] This is a auto numbered footnote. +.. [ref2] This is a citation. +.. [100] This is a numbered footnote. +.. _reference: http://www.example.com diff --git a/tests/roots/test-intl/refs_python_domain.txt b/tests/roots/test-intl/refs_python_domain.txt new file mode 100644 index 0000000..2b021f2 --- /dev/null +++ b/tests/roots/test-intl/refs_python_domain.txt @@ -0,0 +1,15 @@ +:tocdepth: 2 + +i18n with python domain refs +============================= + +.. currentmodule:: sensitive + +See this decorator: :func:`sensitive_variables`. + +.. function:: sensitive_variables(*variables) + + Some description + +.. currentmodule:: reporting + diff --git a/tests/roots/test-intl/role_xref.txt b/tests/roots/test-intl/role_xref.txt new file mode 100644 index 0000000..2919b5c --- /dev/null +++ b/tests/roots/test-intl/role_xref.txt @@ -0,0 +1,40 @@ +:tocdepth: 2 + +.. _i18n-role-xref: + +i18n role xref +============== + +link to :term:`Some term`, :ref:`i18n-role-xref`, :doc:`index`. + +.. _same-type-links: + +same type links +================= + +link to :term:`Some term` and :term:`Some other term`. + +link to :ref:`i18n-role-xref`, :ref:`same-type-links` and :ref:`label <same-type-links>`. + +link to :doc:`index` and :doc:`glossary_terms`. + +link to :option:`-m` and :option:`--module`. + +link to :envvar:`env1` and :envvar:`env2`. + +link to :token:`token1` and :token:`token2`. + +link to :keyword:`i18n-role-xref` and :keyword:`same-type-links`. + + +.. option:: -m <module> + +.. option:: --module <module> + +.. envvar:: env1 + +.. envvar:: env2 + +.. productionlist:: + token_stmt: `token1` ":" `token2` + diff --git a/tests/roots/test-intl/rubric.txt b/tests/roots/test-intl/rubric.txt new file mode 100644 index 0000000..f285d49 --- /dev/null +++ b/tests/roots/test-intl/rubric.txt @@ -0,0 +1,14 @@ +:tocdepth: 2 + +i18n with rubric +================ + +.. rubric:: rubric title + +rubric in the block +------------------- + +block + + .. rubric:: rubric title + diff --git a/tests/roots/test-intl/section.txt b/tests/roots/test-intl/section.txt new file mode 100644 index 0000000..ae0604c --- /dev/null +++ b/tests/roots/test-intl/section.txt @@ -0,0 +1,8 @@ +1. Section +========== + +.. contents:: 3. Contents Title + :local: + +2. 
Sub Section +-------------- diff --git a/tests/roots/test-intl/seealso.txt b/tests/roots/test-intl/seealso.txt new file mode 100644 index 0000000..ed88599 --- /dev/null +++ b/tests/roots/test-intl/seealso.txt @@ -0,0 +1,15 @@ +:tocdepth: 2 + +i18n with seealso +============================ +.. #960 directive-seelaso-ignored-in-the-gettext + +.. seealso:: short text 1 + +.. seealso:: + + long text 1 + +.. seealso:: short text 2 + + long text 2 diff --git a/tests/roots/test-intl/subdir/index.txt b/tests/roots/test-intl/subdir/index.txt new file mode 100644 index 0000000..7578ce3 --- /dev/null +++ b/tests/roots/test-intl/subdir/index.txt @@ -0,0 +1,2 @@ +subdir contents +=============== diff --git a/tests/roots/test-intl/table.txt b/tests/roots/test-intl/table.txt new file mode 100644 index 0000000..cf82438 --- /dev/null +++ b/tests/roots/test-intl/table.txt @@ -0,0 +1,20 @@ +:tocdepth: 2 + +i18n with table +=============== + +.. table:: table caption + + ======= ======= + header1 header2 + ------- ------- + text1 text2 + text3 text4 + text5 text6 + ======= ======= + +.. table:: 1. table caption + + +-----+ + |text1| + +-----+ diff --git a/tests/roots/test-intl/toctree.txt b/tests/roots/test-intl/toctree.txt new file mode 100644 index 0000000..35c956a --- /dev/null +++ b/tests/roots/test-intl/toctree.txt @@ -0,0 +1,10 @@ +i18n with toctree +================= + +.. toctree:: + :caption: caption + + figure <figure> + table + https://www.sphinx-doc.org/ + self diff --git a/tests/roots/test-intl/topic.txt b/tests/roots/test-intl/topic.txt new file mode 100644 index 0000000..255a334 --- /dev/null +++ b/tests/roots/test-intl/topic.txt @@ -0,0 +1,13 @@ +:tocdepth: 2 + +i18n with topic +================ + +.. topic:: Topic Title + + Topic Content + +.. topic:: 1. Topic Title + + Topic Content + diff --git a/tests/roots/test-intl/translation_progress.txt b/tests/roots/test-intl/translation_progress.txt new file mode 100644 index 0000000..f70ab7a --- /dev/null +++ b/tests/roots/test-intl/translation_progress.txt @@ -0,0 +1,40 @@ +Translation Progress +==================== + +When, in disgrace with fortune and men’s eyes, + +I all alone beweep my outcast state, + +And trouble deaf heaven with my bootless cries, + +And look upon myself, and curse my fate, + +Wishing me like to one more rich in hope, + +Featur’d like him, like him with friends possess’d, + +Desiring this man’s art and that man’s scope, + +With what I most enjoy contented least; + +.. idempotent translations (2 out of 14 lines): + +Yet in these thoughts myself almost despising, + +Haply I think on thee, and then my state, + +.. untranslated (2 out of 14 lines): + +Like to the lark at break of day arising + +From sullen earth, sings hymns at heaven’s gate; + +.. translation missing (2 out of 14 lines): + +For thy sweet love remember’d such wealth brings + +That then I scorn to change my state with kings. + +.. translation progress substitution + +|translation progress| diff --git a/tests/roots/test-intl/versionchange.txt b/tests/roots/test-intl/versionchange.txt new file mode 100644 index 0000000..4c57e14 --- /dev/null +++ b/tests/roots/test-intl/versionchange.txt @@ -0,0 +1,16 @@ +:tocdepth: 2 + +i18n with versionchange +============================ + +.. deprecated:: 1.0 + This is the *first* paragraph of deprecated. + + This is the *second* paragraph of deprecated. + +.. versionadded:: 1.0 + This is the *first* paragraph of versionadded. + +.. versionchanged:: 1.0 + + This is the *first* paragraph of versionchanged. 
diff --git a/tests/roots/test-intl/warnings.txt b/tests/roots/test-intl/warnings.txt new file mode 100644 index 0000000..a80fe18 --- /dev/null +++ b/tests/roots/test-intl/warnings.txt @@ -0,0 +1,5 @@ +i18n with reST warnings +======================== + +line of ``literal`` markup. + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/admonitions.po b/tests/roots/test-intl/xx/LC_MESSAGES/admonitions.po new file mode 100644 index 0000000..f114e33 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/admonitions.po @@ -0,0 +1,84 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2013, test_intl +# This file is distributed under the same license as the foo package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.2\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2013-07-03 12:00+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "Admonitions" +msgstr "ADMONITIONS" + +msgid "attention title" +msgstr "ATTENTION TITLE" + +msgid "attention body" +msgstr "ATTENTION BODY" + +msgid "caution title" +msgstr "CAUTION TITLE" + +msgid "caution body" +msgstr "CAUTION BODY" + +msgid "danger title" +msgstr "DANGER TITLE" + +msgid "danger body" +msgstr "DANGER BODY" + +msgid "error title" +msgstr "ERROR TITLE" + +msgid "error body" +msgstr "ERROR BODY" + +msgid "hint title" +msgstr "HINT TITLE" + +msgid "hint body" +msgstr "HINT BODY" + +msgid "important title" +msgstr "IMPORTANT TITLE" + +msgid "important body" +msgstr "IMPORTANT BODY" + +msgid "note title" +msgstr "NOTE TITLE" + +msgid "note body" +msgstr "NOTE BODY" + +msgid "tip title" +msgstr "TIP TITLE" + +msgid "tip body" +msgstr "TIP BODY" + +msgid "warning title" +msgstr "WARNING TITLE" + +msgid "warning body" +msgstr "WARNING BODY" + +msgid "admonition title" +msgstr "ADMONITION TITLE" + +msgid "admonition body" +msgstr "ADMONITION BODY" + +msgid "1. admonition title" +msgstr "1. ADMONITION TITLE" + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/bom.po b/tests/roots/test-intl/xx/LC_MESSAGES/bom.po new file mode 100644 index 0000000..c6025eb --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/bom.po @@ -0,0 +1,12 @@ +#, fuzzy +msgid "" +msgstr "" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "File with UTF-8 BOM" +msgstr "Datei mit UTF-8" + +msgid "This file has a UTF-8 \"BOM\"." +msgstr "This file has umlauts: äöü." diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/definition_terms.po b/tests/roots/test-intl/xx/LC_MESSAGES/definition_terms.po new file mode 100644 index 0000000..1752dd6 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/definition_terms.po @@ -0,0 +1,47 @@ +# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2012, foof
+# This file is distributed under the same license as the foo package.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: sphinx 1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2013-01-01 05:00+0000\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+msgid "i18n with definition terms"
+msgstr "I18N WITH DEFINITION TERMS"
+
+msgid "Some term"
+msgstr "SOME TERM"
+
+msgid "The corresponding definition"
+msgstr "THE CORRESPONDING DEFINITION"
+
+msgid "Some *term* `with link <http://sphinx-doc.org/>`__"
+msgstr "SOME *TERM* `WITH LINK <http://sphinx-doc.org/>`__"
+
+msgid "The corresponding definition #2"
+msgstr "THE CORRESPONDING DEFINITION #2"
+
+msgid "Some **term** with"
+msgstr "SOME **TERM** WITH"
+
+msgid "classifier1"
+msgstr "CLASSIFIER1"
+
+msgid "classifier2"
+msgstr "CLASSIFIER2"
+
+msgid "Some term with"
+msgstr "SOME TERM WITH"
+
+msgid "classifier[]"
+msgstr "CLASSIFIER[]"
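
Note (not part of the patch): a catalog such as ``xx/LC_MESSAGES/definition_terms.po`` above only affects a build after it has been compiled into a binary ``.mo`` file alongside it. Below is a minimal sketch of that step using Babel's ``pofile``/``mofile`` helpers; the path is illustrative and the script itself is not something this patch adds.

from pathlib import Path

from babel.messages import mofile, pofile

po_path = Path('tests/roots/test-intl/xx/LC_MESSAGES/definition_terms.po')

# Parse the catalog shown above; 'xx' is the dummy test language.
with po_path.open(encoding='utf-8') as src:
    catalog = pofile.read_po(src, locale='xx')

# Write the gettext-compatible binary catalog that Sphinx loads at build time.
with po_path.with_suffix('.mo').open('wb') as dst:
    mofile.write_mo(dst, catalog)
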
diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/docfields.po b/tests/roots/test-intl/xx/LC_MESSAGES/docfields.po new file mode 100644 index 0000000..8c3b8f9 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/docfields.po @@ -0,0 +1,39 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2010, Georg Brandl & Team +# This file is distributed under the same license as the Sphinx <Tests> package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: Sphinx <Tests> 0.6\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-12-16 14:11+0000\n" +"PO-Revision-Date: 2012-12-18 06:14+0900\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with docfields" +msgstr "I18N WITH DOCFIELDS" + +msgid "description of parameter param" +msgstr "DESCRIPTION OF PARAMETER param" + +msgid "description of parameter foo" +msgstr "DESCRIPTION OF PARAMETER foo" + +msgid "description of parameter bar" +msgstr "DESCRIPTION OF PARAMETER bar" + +msgid "if the values are not valid" +msgstr "IF THE VALUES ARE NOT VALID" + +msgid "if the values are out of range" +msgstr "IF THE VALUES ARE OUT OF RANGE" + +msgid "a new :class:`Cls3` instance" +msgstr "A NEW :class:`Cls3` INSTANCE" + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/external_links.po b/tests/roots/test-intl/xx/LC_MESSAGES/external_links.po new file mode 100644 index 0000000..8c53abb --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/external_links.po @@ -0,0 +1,47 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2012, foof +# This file is distributed under the same license as the foo package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-22 08:28+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with external links" +msgstr "EXTERNAL LINKS" + +msgid "External link to Python_." +msgstr "EXTERNAL LINK TO Python_." + +msgid "Internal link to `i18n with external links`_." +msgstr "`EXTERNAL LINKS`_ IS INTERNAL LINK." + +msgid "Inline link by `Sphinx Site <http://sphinx-doc.org>`_." +msgstr "INLINE LINK BY `THE SPHINX SITE <http://sphinx-doc.org>`_." + +msgid "Unnamed link__." +msgstr "UNNAMED LINK__." + +msgid "link target swapped translation" +msgstr "LINK TARGET SWAPPED TRANSLATION" + +msgid "link to external1_ and external2_." +msgstr "LINK TO external2_ AND external1_." + +msgid "link to `Sphinx Site <http://sphinx-doc.org>`_ and `Python Site <http://python.org>`_." +msgstr "LINK TO `THE PYTHON SITE <http://python.org>`_ AND `THE SPHINX SITE <http://sphinx-doc.org>`_." + +msgid "Multiple references in the same line" +msgstr "MULTIPLE REFERENCES IN THE SAME LINE" + +msgid "Link to `Sphinx Site <http://sphinx-doc.org>`_, `Python Site <http://python.org>`_, Python_, Unnamed__ and `i18n with external links`_." +msgstr "LINK TO `EXTERNAL LINKS`_, Python_, `THE SPHINX SITE <http://sphinx-doc.org>`_, UNNAMED__ AND `THE PYTHON SITE <http://python.org>`_." 
diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/figure.po b/tests/roots/test-intl/xx/LC_MESSAGES/figure.po new file mode 100644 index 0000000..64bbdf7 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/figure.po @@ -0,0 +1,57 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2012, foof +# This file is distributed under the same license as the foo package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2013-01-04 07:00+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with figure caption" +msgstr "I18N WITH FIGURE CAPTION" + +msgid "My caption of the figure" +msgstr "MY CAPTION OF THE FIGURE" + +msgid "My description paragraph1 of the figure." +msgstr "MY DESCRIPTION PARAGRAPH1 OF THE FIGURE." + +msgid "My description paragraph2 of the figure." +msgstr "MY DESCRIPTION PARAGRAPH2 OF THE FIGURE." + +msgid "figure in the block" +msgstr "FIGURE IN THE BLOCK" + +msgid "block" +msgstr "BLOCK" + +msgid "image url and alt" +msgstr "IMAGE URL AND ALT" + +msgid "img" +msgstr "IMG -> I18N" + +msgid ".. image:: img.png" +msgstr ".. image:: i18n.png" + +msgid "i18n" +msgstr "I18N -> IMG" + +msgid ".. image:: i18n.png" +msgstr ".. image:: img.png" + +msgid "image on substitution" +msgstr "IMAGE ON SUBSTITUTION" + +msgid "image under note" +msgstr "IMAGE UNDER NOTE" + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/footnote.po b/tests/roots/test-intl/xx/LC_MESSAGES/footnote.po new file mode 100644 index 0000000..869bf62 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/footnote.po @@ -0,0 +1,40 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2012, foof +# This file is distributed under the same license as the foo package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-22 08:28+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with Footnote" +msgstr "I18N WITH FOOTNOTE" + +msgid "[100]_ Contents [#]_ for `i18n with Footnote`_ [ref]_ [#named]_ [*]_. " +"second footnote_ref [100]_." +msgstr "`I18N WITH FOOTNOTE`_ INCLUDE THIS CONTENTS [#named]_ [ref]_ [#]_ [100]_ [*]_. " +"SECOND FOOTNOTE_REF [100]_." + +msgid "This is a auto numbered footnote." +msgstr "THIS IS A AUTO NUMBERED FOOTNOTE." + +msgid "This is a named footnote." +msgstr "THIS IS A NAMED FOOTNOTE." + +msgid "This is a numbered footnote." +msgstr "THIS IS A NUMBERED FOOTNOTE." + +msgid "This is a auto numbered named footnote." +msgstr "THIS IS A AUTO NUMBERED NAMED FOOTNOTE." + +msgid "This is a auto symbol footnote." +msgstr "THIS IS A AUTO SYMBOL FOOTNOTE." diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms.po b/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms.po new file mode 100644 index 0000000..83542f1 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms.po @@ -0,0 +1,59 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2012, foof +# This file is distributed under the same license as the foo package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2013-01-29 14:10+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with glossary terms" +msgstr "I18N WITH GLOSSARY TERMS" + +msgid "Some term" +msgstr "SOME NEW TERM" + +msgid "The corresponding glossary" +msgstr "THE CORRESPONDING GLOSSARY" + +msgid "Some other term" +msgstr "SOME OTHER NEW TERM" + +msgid "The corresponding glossary #2" +msgstr "THE CORRESPONDING GLOSSARY #2" + +msgid "link to :term:`Some term`." +msgstr "LINK TO :term:`SOME NEW TERM`." + +msgid "Translated glossary should be sorted by translated terms:" +msgstr "TRANSLATED GLOSSARY SHOULD BE SORTED BY TRANSLATED TERMS:" + +msgid "BBB" +msgstr "TRANSLATED TERM XXX" + +msgid "Define BBB" +msgstr "DEFINE XXX" + +msgid "AAA" +msgstr "TRANSLATED TERM YYY" + +msgid "Define AAA" +msgstr "DEFINE YYY" + +msgid "CCC" +msgstr "TRANSLATED TERM ZZZ" + +msgid "EEE" +msgstr "VVV" + +msgid "Define CCC" +msgstr "DEFINE ZZZ" diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms_inconsistency.po b/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms_inconsistency.po new file mode 100644 index 0000000..ef2bf30 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms_inconsistency.po @@ -0,0 +1,23 @@ +# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2012, foof
+# This file is distributed under the same license as the foo package.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: sphinx 1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2013-01-29 14:10+0000\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+msgid "i18n with glossary terms inconsistency"
+msgstr "I18N WITH GLOSSARY TERMS INCONSISTENCY"
+
+msgid "link to :term:`Some term` and :term:`Some other term`."
+msgstr "LINK TO :term:`SOME NEW TERM`."
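
The msgstr above deliberately drops the second :term: reference from its msgid; when the catalog is applied, Sphinx flags the mismatch as a warning about inconsistent term references. Below is a rough sketch of driving such a build programmatically; the ``confoverrides`` values are assumptions about how a harness might wire this root up, not settings taken from the patch, and the catalogs must already be compiled to ``.mo`` (see the Babel sketch above).

import io

from sphinx.application import Sphinx

warnings = io.StringIO()
app = Sphinx(
    srcdir='tests/roots/test-intl',
    confdir='tests/roots/test-intl',
    outdir='tests/roots/test-intl/_build/html',
    doctreedir='tests/roots/test-intl/_build/doctrees',
    buildername='html',
    confoverrides={'language': 'xx', 'locale_dirs': ['.'], 'gettext_compact': False},
    warning=warnings,
)
app.build()

# The dropped :term: reference in glossary_terms_inconsistency.po is expected
# to surface here as an inconsistent-references warning.
print(warnings.getvalue())
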
diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/index.po b/tests/roots/test-intl/xx/LC_MESSAGES/index.po new file mode 100644 index 0000000..a4646f1 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/index.po @@ -0,0 +1,29 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2010, Georg Brandl & Team +# This file is distributed under the same license as the Sphinx <Tests> package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: Sphinx <Tests> 0.6\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-12-16 14:11+0000\n" +"PO-Revision-Date: 2012-12-18 06:14+0900\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "Table of Contents" +msgstr "TABLE OF CONTENTS" + +msgid "Hidden Toc" +msgstr "HIDDEN TOC" + +msgid "testdata for i18n" +msgstr "TESTDATA FOR I18N" + +msgid "i18n, sphinx, markup" +msgstr "I18N, SPHINX, MARKUP" diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/index_entries.po b/tests/roots/test-intl/xx/LC_MESSAGES/index_entries.po new file mode 100644 index 0000000..83619b4 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/index_entries.po @@ -0,0 +1,77 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2013, foo +# This file is distributed under the same license as the foo package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: foo foo\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2013-01-05 18:10+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with index entries" +msgstr "" + +msgid "index target section" +msgstr "" + +msgid "this is :index:`Newsletter` target paragraph." +msgstr "THIS IS :index:`NEWSLETTER` TARGET PARAGRAPH." + +msgid "various index entries" +msgstr "" + +msgid "That's all." +msgstr "" + +msgid "Mailing List" +msgstr "MAILING LIST" + +msgid "Newsletter" +msgstr "NEWSLETTER" + +msgid "Recipients List" +msgstr "RECIPIENTS LIST" + +msgid "First" +msgstr "FIRST" + +msgid "Second" +msgstr "SECOND" + +msgid "Third" +msgstr "THIRD" + +msgid "Entry" +msgstr "ENTRY" + +msgid "See" +msgstr "SEE" + +msgid "Module" +msgstr "MODULE" + +msgid "Keyword" +msgstr "KEYWORD" + +msgid "Operator" +msgstr "OPERATOR" + +msgid "Object" +msgstr "OBJECT" + +msgid "Exception" +msgstr "EXCEPTION" + +msgid "Statement" +msgstr "STATEMENT" + +msgid "Builtin" +msgstr "BUILTIN" diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/label_target.po b/tests/roots/test-intl/xx/LC_MESSAGES/label_target.po new file mode 100644 index 0000000..60d7c3e --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/label_target.po @@ -0,0 +1,66 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2013, sphinx +# This file is distributed under the same license as the sphinx package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: 1.2\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2013-06-19 00:33+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "section and label" +msgstr "X SECTION AND LABEL" + +msgid "" +":ref:`implicit-target` point to ``implicit-target`` and " +"`section and label`_ point to ``section-and-label``." +msgstr "" +":ref:`implicit-target` POINT TO ``implicit-target`` AND " +"`X SECTION AND LABEL`_ POINT TO ``section-and-label``." + +msgid "explicit-target" +msgstr "X EXPLICIT-TARGET" + +msgid "" +":ref:`explicit-target` point to ``explicit-target`` and `explicit-target`_" +" point to duplicated id like ``id1``." +msgstr "" +":ref:`explicit-target` POINT TO ``explicit-target`` AND `X EXPLICIT-TARGET`_" +" POINT TO DUPLICATED ID LIKE ``id1``." + +msgid "implicit section name" +msgstr "X IMPLICIT SECTION NAME" + +msgid "`implicit section name`_ point to ``implicit-section-name``." +msgstr "`X IMPLICIT SECTION NAME`_ POINT TO ``implicit-section-name``." + +msgid "duplicated sub section" +msgstr "X DUPLICATED SUB SECTION" + +msgid "" +"`duplicated sub section`_ is broken link." +msgstr "" +"`X DUPLICATED SUB SECTION`_ IS BROKEN LINK." + +msgid "label bridged target section" +msgstr "X LABEL BRIDGED TARGET SECTION" + +msgid "`bridge label`_ is not translatable but linked to translated section title." +msgstr "X `bridge label`_ IS NOT TRANSLATABLE BUT LINKED TO TRANSLATED SECTION TITLE." + +msgid "" +"`bridge label2`_ point to ``section and label`` and `bridge label`_ point to " +"``label bridged target section``. The second appeared `bridge label2`_ point " +"to correct target." +msgstr "" +"X `bridge label`_ POINT TO ``LABEL BRIDGED TARGET SECTION`` AND " +"`bridge label2`_ POINT TO ``SECTION AND LABEL``. THE SECOND APPEARED " +"`bridge label2`_ POINT TO CORRECT TARGET." diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/literalblock.po b/tests/roots/test-intl/xx/LC_MESSAGES/literalblock.po new file mode 100644 index 0000000..8d3e5d8 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/literalblock.po @@ -0,0 +1,103 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2012, foof +# This file is distributed under the same license as the foo package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-22 08:28+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with literal block" +msgstr "I18N WITH LITERAL BLOCK" + +msgid "Correct literal block::" +msgstr "CORRECT LITERAL BLOCK::" + +msgid "Missing literal block::" +msgstr "MISSING LITERAL BLOCK::" + +msgid "That's all." +msgstr "THAT'S ALL." 
+ +msgid "included raw.txt" +msgstr "INCLUDED RAW.TXT" + +msgid "code blocks" +msgstr "CODE-BLOCKS" + +msgid "" +"def main\n" +" 'result'\n" +"end" +msgstr "" +"def main\n" +" 'RESULT'\n" +"end" + +msgid "example of C language" +msgstr "EXAMPLE OF C LANGUAGE" + +msgid "" +"#include <stdlib.h>\n" +"int main(int argc, char** argv)\n" +"{\n" +" return 0;\n" +"}" +msgstr "" +"#include <STDLIB.H>\n" +"int main(int ARGC, char** ARGV)\n" +"{\n" +" return 0;\n" +"}" + +msgid "" +"#include <stdio.h>\n" +"int main(int argc, char** argv)\n" +"{\n" +" return 0;\n" +"}" +msgstr "" +"#include <STDIO.H>\n" +"int main(int ARGC, char** ARGV)\n" +"{\n" +" return 0;\n" +"}" + +msgid "literal-block\n" +"in list" +msgstr "LITERAL-BLOCK\n" +"IN LIST" + +msgid "test_code_for_noqa()\n" +"continued()" +msgstr "" +"# TRAILING noqa SHOULD NOT GET STRIPPED\n" +"# FROM THIS BLOCK. #noqa" + +msgid "doctest blocks" +msgstr "DOCTEST-BLOCKS" + +msgid "" +">>> import sys # sys importing\n" +">>> def main(): # define main function\n" +"... sys.stdout.write('hello') # call write method of stdout object\n" +">>>\n" +">>> if __name__ == '__main__': # if run this py file as python script\n" +"... main() # call main" +msgstr "" +">>> import sys # SYS IMPORTING\n" +">>> def main(): # DEFINE MAIN FUNCTION\n" +"... sys.stdout.write('hello') # CALL WRITE METHOD OF STDOUT OBJECT\n" +">>>\n" +">>> if __name__ == '__main__': # IF RUN THIS PY FILE AS PYTHON SCRIPT\n" +"... main() # CALL MAIN" + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/noqa.po b/tests/roots/test-intl/xx/LC_MESSAGES/noqa.po new file mode 100644 index 0000000..1af66b4 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/noqa.po @@ -0,0 +1,46 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) +# This file is distributed under the same license as the Sphinx intl <Tests> package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +msgid "" +msgstr "" +"Project-Id-Version: \n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2022-01-16 15:23+0100\n" +"PO-Revision-Date: 2022-01-16 15:23+0100\n" +"Last-Translator: Jean Abou Samra <jean@abou-samra.fr>\n" +"Language-Team: \n" +"Language: xx\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"X-Generator: Poedit 3.0\n" + +#: ../tests/roots/test-intl/noqa.txt:2 +msgid "First section" +msgstr "FIRST SECTION" + +#: ../tests/roots/test-intl/noqa.txt:4 +msgid "Some text with a reference, :ref:`next-section`." +msgstr "TRANSLATED TEXT WITHOUT REFERENCE. #noqa" + +#: ../tests/roots/test-intl/noqa.txt:6 +msgid "Another reference: :ref:`next-section`." +msgstr "" +"TEST noqa WHITESPACE INSENSITIVITY.\n" +"# \n" +" noqa" + +#: ../tests/roots/test-intl/noqa.txt:8 +msgid "This should allow to test escaping ``#noqa``." +msgstr "``#noqa`` IS ESCAPED AT THE END OF THIS STRING. \\#noqa" + +#: ../tests/roots/test-intl/noqa.txt:13 +msgid "Next section" +msgstr "NEXT SECTION WITH PARAGRAPH TO TEST BARE noqa" + +# This edge case should not fail. +#: ../tests/roots/test-intl/noqa.txt:15 +msgid "Some text, again referring to the section: :ref:`next-section`." +msgstr "#noqa" diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/only.po b/tests/roots/test-intl/xx/LC_MESSAGES/only.po new file mode 100644 index 0000000..43eb7d6 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/only.po @@ -0,0 +1,29 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2010, Georg Brandl & Team +# This file is distributed under the same license as the Sphinx <Tests> package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: Sphinx <Tests> 0.6\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2013-02-04 13:06+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "Only directive" +msgstr "ONLY DIRECTIVE" + +msgid "In HTML." +msgstr "IN HTML." + +msgid "In LaTeX." +msgstr "IN LATEX." + +msgid "In both." +msgstr "IN BOTH." diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/raw.po b/tests/roots/test-intl/xx/LC_MESSAGES/raw.po new file mode 100644 index 0000000..f2e8893 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/raw.po @@ -0,0 +1,21 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2015, dev +# This file is distributed under the same license as the 1235 package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: 1235 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2015-02-22 15:22+0900\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "<iframe src=\"http://sphinx-doc.org\"></iframe>" +msgstr "<iframe src=\"HTTP://SPHINX-DOC.ORG\"></iframe>" + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/refs.po b/tests/roots/test-intl/xx/LC_MESSAGES/refs.po new file mode 100644 index 0000000..510a5a7 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/refs.po @@ -0,0 +1,85 @@ +# +msgid "" +msgstr "" +"Project-Id-Version: 1191 1.3\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2015-08-08 15:31+0900\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "Translation Tips" +msgstr "X TIPS" + +msgid "A-1. Here's how you can `download Sphinx`_." +msgstr "A-1. HERE'S HOW YOU CAN `download Sphinx`_." + +msgid "A-2. Here's how you can `download Sphinx`_." +msgstr "A-2. HERE'S HOW YOU CAN `A1 DOWNLOAD SPHINX`_." + +msgid "A-3. Here's how you can `download Sphinx`_." +msgstr "" +"A-3. HERE'S HOW YOU CAN `A3 DOWNLOAD SPHINX <download Sphinx_>`_ AND `A3 DOWNLOAD " +"SPHINX <download Sphinx_>`_." + +msgid "B-1. `Docutils site`_ and `Sphinx site`_." +msgstr "B-1. `Docutils site`_ and `Sphinx site`_." + +msgid "B-2. `Docutils site`_ and `Sphinx site`_." +msgstr "B-2. `B1 DOCUTILS SITE`_ AND `B1 SPHINX SITE`_." + +msgid "B-3. `Docutils site`_ and `Sphinx site`_." +msgstr "B-3. `B2 SPHINX SITE`_ AND `B2 DOCUTILS SITE`_." + +msgid "B-4. `Docutils site`_ and `Sphinx site`_." +msgstr "" +"B-4. `B4 SPHINX SITE <Sphinx site_>`_ AND `B4 DOCUTILS SITE <Docutils " +"site_>`_." + +msgid "B-5. `Docutils site`_ and `Sphinx site`_." +msgstr "" +"B-5. `B5 SPHINX SITE <Sphinx site_>`_ AND `B5 DOCUTILS SITE <Docutils " +"site_>`_\" AND `B5 SPHINX SITE <Sphinx site_>`_." + +msgid "C-1. Link to `Translation Tips`_ section." +msgstr "C-1. LINK TO `Translation Tips`_ SECTION." + +msgid "C-2. Link to `Translation Tips`_ section." +msgstr "C-2. LINK TO `X TIPS`_ SECTION." + +msgid "C-3. Link to `Translation Tips`_ section." +msgstr "C-3. LINK TO `X TIPS <Translation Tips_>`_ SECTION." + +msgid "C-4. Link to `Translation Tips`_ section." 
+msgstr "" +"C-4. LINK TO `X TIPS <Translation Tips_>`_ x `X TIPS <Translation Tips_>`_ " +"SECTION." + +msgid "C-5. Link to `Translation Tips`_ section." +msgstr "" +"C-5. LINK TO `TRANS <X TIPS_>`_ x `LATION <X TIPS_>`_ " + +msgid "D-1. Link to `Translation Tips`_ and `Next Section`_ section." +msgstr "D-1. LINK TO `Translation Tips`_ and `Next Section`_ SECTION." + +msgid "D-2. Link to `Translation Tips`_ and `Next Section`_ section." +msgstr "D-2. LINK TO `X TIPS`_ AND `N SECTION`_ SECTION." + +msgid "D-3. Link to `Translation Tips`_ and `Next Section`_ section." +msgstr "D-3. LINK TO `N SECTION`_ AND `X TIPS`_ SECTION." + +msgid "D-4. Link to `Translation Tips`_ and `Next Section`_ section." +msgstr "" +"D-4. LINK TO `N SECTION <Next Section_>`_ AND `X TIPS <Translation Tips_>`_ " +"SECTION." + +msgid "D-5. Link to `Translation Tips`_ and `Next Section`_ section." +msgstr "" +"D-5. LINK TO `Next <N SECTION_>`_ AND `Tips <X TIPS_>`_ " + +msgid "Next Section" +msgstr "N SECTION" + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/refs_inconsistency.po b/tests/roots/test-intl/xx/LC_MESSAGES/refs_inconsistency.po new file mode 100644 index 0000000..9d8d13f --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/refs_inconsistency.po @@ -0,0 +1,39 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2012, foof +# This file is distributed under the same license as the foo package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-12-05 08:28+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with refs inconsistency" +msgstr "I18N WITH REFS INCONSISTENCY" + +msgid "[100]_ for [#]_ citation [ref2]_." +msgstr "FOR CITATION [ref3]_." + +msgid "for reference_." +msgstr "reference_ FOR reference_." + +msgid "normal text." +msgstr "ORPHAN REFERENCE: `I18N WITH REFS INCONSISTENCY`_." + +msgid "This is a auto numbered footnote." +msgstr "THIS IS A AUTO NUMBERED FOOTNOTE." + +msgid "This is a citation." +msgstr "THIS IS A CITATION." + +msgid "This is a numbered footnote." +msgstr "THIS IS A NUMBERED FOOTNOTE." + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/refs_python_domain.po b/tests/roots/test-intl/xx/LC_MESSAGES/refs_python_domain.po new file mode 100644 index 0000000..bed87c4 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/refs_python_domain.po @@ -0,0 +1,25 @@ +# +msgid "" +msgstr "" +"Project-Id-Version: issue1363 1363\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2014-03-16 19:34+0900\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +#: ..\..\index.rst:4 +msgid "i18n with python domain refs" +msgstr "I18N WITH PYTHON DOMAIN REFS" + +#: ..\..\index.rst:8 +msgid "See this decorator: :func:`sensitive_variables`." +msgstr "SEE THIS DECORATOR: :func:`sensitive_variables`." 
+ +#: ..\..\index.rst:12 +msgid "Some description" +msgstr "SOME DESCRIPTION" + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/role_xref.po b/tests/roots/test-intl/xx/LC_MESSAGES/role_xref.po new file mode 100644 index 0000000..96d821f --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/role_xref.po @@ -0,0 +1,47 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2012, foof +# This file is distributed under the same license as the foo package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2013-02-04 14:00+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n role xref" +msgstr "I18N ROCK'N ROLE XREF" + +msgid "link to :term:`Some term`, :ref:`i18n-role-xref`, :doc:`index`." +msgstr "LINK TO :ref:`i18n-role-xref`, :doc:`index`, :term:`SOME NEW TERM`." + +msgid "same type links" +msgstr "SAME TYPE LINKS" + +msgid "link to :term:`Some term` and :term:`Some other term`." +msgstr "LINK TO :term:`SOME OTHER NEW TERM` AND :term:`SOME NEW TERM`." + +msgid "link to :ref:`i18n-role-xref`, :ref:`same-type-links` and :ref:`label <same-type-links>`." +msgstr "LINK TO :ref:`LABEL <i18n-role-xref>` AND :ref:`same-type-links` AND :ref:`same-type-links`." + +msgid "link to :doc:`index` and :doc:`glossary_terms`." +msgstr "LINK TO :doc:`glossary_terms` AND :doc:`index`." + +msgid "link to :option:`-m` and :option:`--module`." +msgstr "LINK TO :option:`--module` AND :option:`-m`." + +msgid "link to :envvar:`env1` and :envvar:`env2`." +msgstr "LINK TO :envvar:`env2` AND :envvar:`env1`." + +msgid "link to :token:`token1` and :token:`token2`." +msgstr "LINK TO :token:`token2` AND :token:`token1`." + +msgid "link to :keyword:`i18n-role-xref` and :keyword:`same-type-links`." +msgstr "LINK TO :keyword:`same-type-links` AND :keyword:`i18n-role-xref`." diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/rubric.po b/tests/roots/test-intl/xx/LC_MESSAGES/rubric.po new file mode 100644 index 0000000..9137623 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/rubric.po @@ -0,0 +1,29 @@ +# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2012, foof
+# This file is distributed under the same license as the foo package.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: sphinx 1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2013-11-12 07:00+0000\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+msgid "i18n with rubric"
+msgstr "I18N WITH RUBRIC"
+
+msgid "rubric title"
+msgstr "RUBRIC TITLE"
+
+msgid "rubric in the block"
+msgstr "RUBRIC IN THE BLOCK"
+
+msgid "block"
+msgstr "BLOCK"
diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/section.po b/tests/roots/test-intl/xx/LC_MESSAGES/section.po new file mode 100644 index 0000000..4af349c --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/section.po @@ -0,0 +1,28 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2018, dev +# This file is distributed under the same license as the sphinx package. +# FIRST AUTHOR <EMAIL@ADDRESS>, 2018. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2018-05-06 16:44+0900\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.4.0\n" + +msgid "1. Section" +msgstr "1. SECTION" + +msgid "2. Sub Section" +msgstr "2. SUB SECTION" + +msgid "3. Contents Title" +msgstr "3. CONTENTS TITLE" + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/seealso.po b/tests/roots/test-intl/xx/LC_MESSAGES/seealso.po new file mode 100644 index 0000000..86a1c73 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/seealso.po @@ -0,0 +1,33 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2010, Georg Brandl & Team +# This file is distributed under the same license as the Sphinx <Tests> package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: Sphinx <Tests> 0.6\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-12-16 06:06+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with seealso" +msgstr "I18N WITH SEEALSO" + +msgid "short text 1" +msgstr "SHORT TEXT 1" + +msgid "long text 1" +msgstr "LONG TEXT 1" + +msgid "short text 2" +msgstr "SHORT TEXT 2" + +msgid "long text 2" +msgstr "LONG TEXT 2" + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/sphinx.po b/tests/roots/test-intl/xx/LC_MESSAGES/sphinx.po new file mode 100644 index 0000000..a236f2f --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/sphinx.po @@ -0,0 +1,23 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2012, foof +# This file is distributed under the same license as the foo package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-22 08:28+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "Welcome" +msgstr "WELCOME" + +msgid "Sphinx %(version)s" +msgstr "SPHINX %(version)s" diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/table.po b/tests/roots/test-intl/xx/LC_MESSAGES/table.po new file mode 100644 index 0000000..d8ffd35 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/table.po @@ -0,0 +1,54 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2010, Georg Brandl & Team +# This file is distributed under the same license as the Sphinx <Tests> package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: Sphinx <Tests> 0.6\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-12-16 06:06+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with table" +msgstr "I18N WITH TABLE" + +msgid "table caption" +msgstr "TABLE CAPTION" + +msgid "header1" +msgstr "HEADER1" + +msgid "header2" +msgstr "HEADER2" + +msgid "text1" +msgstr "TEXT1" + +msgid "text2" +msgstr "TEXT2" + +msgid "text3" +msgstr "TEXT3" + +msgid "text1" +msgstr "TEXT1" + +msgid "text4" +msgstr "TEXT4" + +msgid "text5" +msgstr "TEXT5" + +msgid "text6" +msgstr "TEXT6" + +msgid "1. table caption" +msgstr "1. TABLE CAPTION" + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/toctree.po b/tests/roots/test-intl/xx/LC_MESSAGES/toctree.po new file mode 100644 index 0000000..62cccdf --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/toctree.po @@ -0,0 +1,31 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) +# This file is distributed under the same license as the Sphinx intl <Tests> package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: Sphinx intl <Tests> 2013.120\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2019-11-01 10:24+0900\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +#: ../../toctree.txt:4 +msgid "figure" +msgstr "FIGURE" + +#: ../../toctree.txt:4 +#: ../../toctree.txt:4 +msgid "caption" +msgstr "CAPTION" + +#: ../../toctree.txt:2 +msgid "i18n with toctree" +msgstr "I18N WITH TOCTREE" + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/topic.po b/tests/roots/test-intl/xx/LC_MESSAGES/topic.po new file mode 100644 index 0000000..53ecb0d --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/topic.po @@ -0,0 +1,31 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2018, dev +# This file is distributed under the same license as the sphinx package. +# FIRST AUTHOR <EMAIL@ADDRESS>, 2018. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2018-05-06 16:44+0900\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.4.0\n" + +msgid "i18n with topic" +msgstr "I18N WITH TOPIC" + +msgid "Topic Title" +msgstr "TOPIC TITLE" + +msgid "Topic Content" +msgstr "TOPIC CONTENT" + +msgid "1. Topic Title" +msgstr "1. 
TOPIC TITLE" + diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/translation_progress.po b/tests/roots/test-intl/xx/LC_MESSAGES/translation_progress.po new file mode 100644 index 0000000..94673d1 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/translation_progress.po @@ -0,0 +1,57 @@ +msgid "" +msgstr "" +"Project-Id-Version: \n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2000-01-01 00:00\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: \n" +"Language: xx\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "Translation Progress" +msgstr "TRANSLATION PROGRESS" + +msgid "When, in disgrace with fortune and men’s eyes," +msgstr "WHEN, IN DISGRACE WITH FORTUNE AND MEN’S EYES," + +msgid "I all alone beweep my outcast state," +msgstr "I ALL ALONE BEWEEP MY OUTCAST STATE," + +msgid "And trouble deaf heaven with my bootless cries," +msgstr "AND TROUBLE DEAF HEAVEN WITH MY BOOTLESS CRIES," + +msgid "And look upon myself, and curse my fate," +msgstr "AND LOOK UPON MYSELF, AND CURSE MY FATE," + +msgid "Wishing me like to one more rich in hope," +msgstr "WISHING ME LIKE TO ONE MORE RICH IN HOPE," + +msgid "Featur’d like him, like him with friends possess’d," +msgstr "FEATUR’D LIKE HIM, LIKE HIM WITH FRIENDS POSSESS’D," + +msgid "Desiring this man’s art and that man’s scope," +msgstr "DESIRING THIS MAN’S ART AND THAT MAN’S SCOPE," + +msgid "With what I most enjoy contented least;" +msgstr "WITH WHAT I MOST ENJOY CONTENTED LEAST;" + +# idempotent translations (2 out of 14 lines): + +msgid "Yet in these thoughts myself almost despising," +msgstr "Yet in these thoughts myself almost despising," + +msgid "Haply I think on thee, and then my state," +msgstr "Haply I think on thee, and then my state," + +# untranslated (2 out of 14 lines): + +msgid "Like to the lark at break of day arising" +msgstr "" + +msgid "From sullen earth, sings hymns at heaven’s gate;" +msgstr "" + +# translation missing (2 out of 14 lines): diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/versionchange.po b/tests/roots/test-intl/xx/LC_MESSAGES/versionchange.po new file mode 100644 index 0000000..5a8df38 --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/versionchange.po @@ -0,0 +1,33 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2010, Georg Brandl & Team +# This file is distributed under the same license as the Sphinx <Tests> package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-12-15 03:17+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with versionchange" +msgstr "I18N WITH VERSIONCHANGE" + +msgid "This is the *first* paragraph of deprecated." +msgstr "THIS IS THE *FIRST* PARAGRAPH OF DEPRECATED." + +msgid "This is the *second* paragraph of deprecated." +msgstr "THIS IS THE *SECOND* PARAGRAPH OF DEPRECATED." + +msgid "This is the *first* paragraph of versionadded." +msgstr "THIS IS THE *FIRST* PARAGRAPH OF VERSIONADDED." + +msgid "This is the *first* paragraph of versionchanged." +msgstr "THIS IS THE *FIRST* PARAGRAPH OF VERSIONCHANGED." 
+ diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/warnings.po b/tests/roots/test-intl/xx/LC_MESSAGES/warnings.po new file mode 100644 index 0000000..7963a0a --- /dev/null +++ b/tests/roots/test-intl/xx/LC_MESSAGES/warnings.po @@ -0,0 +1,23 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2010, Georg Brandl & Team +# This file is distributed under the same license as the Sphinx <Tests> package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: Sphinx <Tests> 0.6\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2013-02-04 13:06+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with reST warnings" +msgstr "I18N WITH REST WARNINGS" + +msgid "line of ``literal`` markup." +msgstr "LINE OF ``BROKEN LITERAL MARKUP." diff --git a/tests/roots/test-intl_substitution_definitions/conf.py b/tests/roots/test-intl_substitution_definitions/conf.py new file mode 100644 index 0000000..5e43033 --- /dev/null +++ b/tests/roots/test-intl_substitution_definitions/conf.py @@ -0,0 +1,13 @@ +exclude_patterns = ['_build'] + +rst_prolog = """\ +.. |subst_prolog_1| replace:: prologue substitute text + +.. |subst_prolog_2| image:: /img.png +""" + +rst_epilog = """\ +.. |subst_epilog_1| replace:: epilogue substitute text + +.. |subst_epilog_2| image:: /i18n.png +""" diff --git a/tests/roots/test-intl_substitution_definitions/i18n.png b/tests/roots/test-intl_substitution_definitions/i18n.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-intl_substitution_definitions/i18n.png diff --git a/tests/roots/test-intl_substitution_definitions/img.png b/tests/roots/test-intl_substitution_definitions/img.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-intl_substitution_definitions/img.png diff --git a/tests/roots/test-intl_substitution_definitions/index.rst b/tests/roots/test-intl_substitution_definitions/index.rst new file mode 100644 index 0000000..9b8c155 --- /dev/null +++ b/tests/roots/test-intl_substitution_definitions/index.rst @@ -0,0 +1,10 @@ +CONTENTS +======== + +.. toctree:: + :maxdepth: 2 + :numbered: + :caption: Table of Contents + + prolog_epilog_substitution + prolog_epilog_substitution_excluded diff --git a/tests/roots/test-intl_substitution_definitions/prolog_epilog_substitution.rst b/tests/roots/test-intl_substitution_definitions/prolog_epilog_substitution.rst new file mode 100644 index 0000000..4127ba4 --- /dev/null +++ b/tests/roots/test-intl_substitution_definitions/prolog_epilog_substitution.rst @@ -0,0 +1,12 @@ +:tocdepth: 2 + +i18n with prologue and epilogue substitutions +============================================= + +This is content that contains |subst_prolog_1|. + +Substituted image |subst_prolog_2| here. + +This is content that contains |subst_epilog_1|. + +Substituted image |subst_epilog_2| here. 
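For any of the xx catalogs in this patch to take effect, the corresponding test root's conf.py (not reproduced here) has to point Sphinx at them. An illustrative minimum — the exact values are assumptions for this sketch, not part of the patch:

    # conf.py (illustrative)
    language = 'xx'          # must match the xx/LC_MESSAGES directory layout used above
    locale_dirs = ['.']      # directories searched for <language>/LC_MESSAGES/*.mo
    gettext_compact = False  # keep one catalog per source document instead of merging them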
diff --git a/tests/roots/test-intl_substitution_definitions/prolog_epilog_substitution_excluded.rst b/tests/roots/test-intl_substitution_definitions/prolog_epilog_substitution_excluded.rst new file mode 100644 index 0000000..0ddfc74 --- /dev/null +++ b/tests/roots/test-intl_substitution_definitions/prolog_epilog_substitution_excluded.rst @@ -0,0 +1,6 @@ +:tocdepth: 2 + +i18n without prologue and epilogue substitutions +================================================ + +This is content that does not include prologue and epilogue substitutions. diff --git a/tests/roots/test-intl_substitution_definitions/xx/LC_MESSAGES/prolog_epilog_substitution.po b/tests/roots/test-intl_substitution_definitions/xx/LC_MESSAGES/prolog_epilog_substitution.po new file mode 100644 index 0000000..3ce51fe --- /dev/null +++ b/tests/roots/test-intl_substitution_definitions/xx/LC_MESSAGES/prolog_epilog_substitution.po @@ -0,0 +1,38 @@ +msgid "" +msgstr "" +"Project-Id-Version: sphinx tests\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-07-21 12:00+0800\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with prologue and epilogue substitutions" +msgstr "I18N WITH PROLOGUE AND EPILOGUE SUBSTITUTIONS" + +msgid "This is content that contains |subst_prolog_1|." +msgstr "THIS IS CONTENT THAT CONTAINS |subst_prolog_1|." + +msgid "Substituted image |subst_prolog_2| here." +msgstr "SUBSTITUTED IMAGE |subst_prolog_2| HERE." + +msgid "This is content that contains |subst_epilog_1|." +msgstr "THIS IS CONTENT THAT CONTAINS |subst_epilog_1|." + +msgid "Substituted image |subst_epilog_2| here." +msgstr "SUBSTITUTED IMAGE |subst_epilog_2| HERE." + +msgid "subst_prolog_2" +msgstr "SUBST_PROLOG_2 TRANSLATED" + +msgid ".. image:: /img.png" +msgstr ".. image:: /i18n.png" + +msgid "subst_epilog_2" +msgstr "SUBST_EPILOG_2 TRANSLATED" + +msgid ".. image:: /i18n.png" +msgstr ".. image:: /img.png" diff --git a/tests/roots/test-keep_warnings/conf.py b/tests/roots/test-keep_warnings/conf.py new file mode 100644 index 0000000..b7b3c31 --- /dev/null +++ b/tests/roots/test-keep_warnings/conf.py @@ -0,0 +1 @@ +keep_warnings = True diff --git a/tests/roots/test-keep_warnings/index.rst b/tests/roots/test-keep_warnings/index.rst new file mode 100644 index 0000000..1e2d597 --- /dev/null +++ b/tests/roots/test-keep_warnings/index.rst @@ -0,0 +1,2 @@ +keep_warnings +===== diff --git a/tests/roots/test-latex-babel/bar.rst b/tests/roots/test-latex-babel/bar.rst new file mode 100644 index 0000000..c1ddf30 --- /dev/null +++ b/tests/roots/test-latex-babel/bar.rst @@ -0,0 +1,4 @@ +=== +Bar +=== + diff --git a/tests/roots/test-latex-babel/conf.py b/tests/roots/test-latex-babel/conf.py new file mode 100644 index 0000000..175e3ff --- /dev/null +++ b/tests/roots/test-latex-babel/conf.py @@ -0,0 +1,5 @@ +numfig_format = { + 'figure': 'Fig. %s', + 'table': 'Table. 
%s', + 'code-block': 'List.', +} diff --git a/tests/roots/test-latex-babel/foo.rst b/tests/roots/test-latex-babel/foo.rst new file mode 100644 index 0000000..cecc672 --- /dev/null +++ b/tests/roots/test-latex-babel/foo.rst @@ -0,0 +1,4 @@ +=== +Foo +=== + diff --git a/tests/roots/test-latex-babel/index.rst b/tests/roots/test-latex-babel/index.rst new file mode 100644 index 0000000..7c19f9e --- /dev/null +++ b/tests/roots/test-latex-babel/index.rst @@ -0,0 +1,8 @@ +test-tocdepth +============= + +.. toctree:: + :caption: Table of content + + foo + bar diff --git a/tests/roots/test-latex-container/conf.py b/tests/roots/test-latex-container/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-latex-container/conf.py diff --git a/tests/roots/test-latex-container/index.rst b/tests/roots/test-latex-container/index.rst new file mode 100644 index 0000000..899788b --- /dev/null +++ b/tests/roots/test-latex-container/index.rst @@ -0,0 +1,4 @@ +.. container:: classname + + text +
\ No newline at end of file diff --git a/tests/roots/test-latex-equations/conf.py b/tests/roots/test-latex-equations/conf.py new file mode 100644 index 0000000..d851892 --- /dev/null +++ b/tests/roots/test-latex-equations/conf.py @@ -0,0 +1,2 @@ +root_doc = 'equations' +extensions = ['sphinx.ext.imgmath'] diff --git a/tests/roots/test-latex-equations/equations.rst b/tests/roots/test-latex-equations/equations.rst new file mode 100644 index 0000000..2eef2f2 --- /dev/null +++ b/tests/roots/test-latex-equations/equations.rst @@ -0,0 +1,21 @@ +test-latex-equation +=================== + +Equation without a label. + +.. math:: + + E = mc^2 + +Equation with label. + +.. math:: E = hv + :label: test + +Second equation without label. + +.. math:: + + c^2 = a^2 + b^2 + +Equation with label :eq:`test` is important. diff --git a/tests/roots/test-latex-equations/expects/latex-equations.tex b/tests/roots/test-latex-equations/expects/latex-equations.tex new file mode 100644 index 0000000..5374a67 --- /dev/null +++ b/tests/roots/test-latex-equations/expects/latex-equations.tex @@ -0,0 +1,18 @@ + +\sphinxAtStartPar +Equation without a label. +\begin{equation*} +\begin{split}E = mc^2\end{split} +\end{equation*} +\sphinxAtStartPar +Equation with label. +\begin{equation}\label{equation:equations:test} +\begin{split}E = hv\end{split} +\end{equation} +\sphinxAtStartPar +Second equation without label. +\begin{equation*} +\begin{split}c^2 = a^2 + b^2\end{split} +\end{equation*} +\sphinxAtStartPar +Equation with label \eqref{equation:equations:test} is important. diff --git a/tests/roots/test-latex-figure-in-admonition/conf.py b/tests/roots/test-latex-figure-in-admonition/conf.py new file mode 100644 index 0000000..a45d22e --- /dev/null +++ b/tests/roots/test-latex-figure-in-admonition/conf.py @@ -0,0 +1 @@ +exclude_patterns = ['_build'] diff --git a/tests/roots/test-latex-figure-in-admonition/img.png b/tests/roots/test-latex-figure-in-admonition/img.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-latex-figure-in-admonition/img.png diff --git a/tests/roots/test-latex-figure-in-admonition/index.rst b/tests/roots/test-latex-figure-in-admonition/index.rst new file mode 100644 index 0000000..e3d39d3 --- /dev/null +++ b/tests/roots/test-latex-figure-in-admonition/index.rst @@ -0,0 +1,9 @@ +Test Figure in Admonition +========================= + +.. caution:: + + This uses a figure in an admonition. + + .. 
figure:: img.png + diff --git a/tests/roots/test-latex-includegraphics/conf.py b/tests/roots/test-latex-includegraphics/conf.py new file mode 100644 index 0000000..65c19ab --- /dev/null +++ b/tests/roots/test-latex-includegraphics/conf.py @@ -0,0 +1,47 @@ +exclude_patterns = ['_build'] + +latex_elements = { + 'preamble': r''' +\makeatletter +\def\dividetwolengths#1#2{\the\dimexpr + \numexpr65536*\dimexpr#1\relax/\dimexpr#2\relax sp}% +\newwrite\out +\immediate\openout\out=\jobname-dimensions.txt +\def\toout{\immediate\write\out} +\def\getWfromoptions #1width=#2,#3\relax{\def\WidthFromOption{#2}}% +\def\getHfromoptions #1height=#2,#3\relax{\def\HeightFromOption{#2}}% +\def\tempincludegraphics[#1]#2{% + \sphinxsafeincludegraphics[#1]{#2}% + \edef\obtainedratio + {\dividetwolengths\spx@image@requiredheight\spx@image@requiredwidth}% + \getWfromoptions#1,width=,\relax + \getHfromoptions#1,height=,\relax + \def\ratiocheck{}% + \ifx\WidthFromOption\empty\else + \ifx\HeightFromOption\empty\else + \edef\askedforratio{\dividetwolengths\HeightFromOption\WidthFromOption}% + \edef\ratiocheck{\dividetwolengths\obtainedratio\askedforratio}% + \fi\fi + \toout{original options = #1^^J% + width = \the\dimexpr\spx@image@requiredwidth, + linewidth = \the\linewidth^^J% + height = \the\dimexpr\spx@image@requiredheight, + maxheight = \the\spx@image@maxheight^^J% + obtained H/W = \obtainedratio^^J% + \ifx\ratiocheck\empty + \else + asked for H/W = \askedforratio^^J% + ratio of ratios = \ratiocheck^^J% + \fi + }% + \ifx\ratiocheck\empty + \else + \ifpdfabsdim\dimexpr\ratiocheck-1pt\relax > 0.01pt + \ASPECTRATIOERROR + \fi + \fi +} +\def\sphinxincludegraphics#1#{\tempincludegraphics#1} +\makeatother +''', +} diff --git a/tests/roots/test-latex-includegraphics/img.png b/tests/roots/test-latex-includegraphics/img.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-latex-includegraphics/img.png diff --git a/tests/roots/test-latex-includegraphics/index.rst b/tests/roots/test-latex-includegraphics/index.rst new file mode 100644 index 0000000..920c749 --- /dev/null +++ b/tests/roots/test-latex-includegraphics/index.rst @@ -0,0 +1,37 @@ +==================== +Test image inclusion +==================== + +Tests with both width and height +-------------------------------- + +.. an image with big dimensions, ratio H/W = 1/5 +.. image:: img.png + :height: 200 + :width: 1000 + +.. topic:: Oversized images + + .. an image with big dimensions, ratio H/W = 5/1 + .. image:: img.png + :height: 1000 + :width: 200 + + .. height too big even if width reduced to linewidth, ratio H/W = 3/1 + .. image:: img.png + :width: 1000 + :height: 3000 + +Tests with only width or height +------------------------------- + +.. topic:: Oversized images + + .. tall image which does not fit in textheight even if width rescaled + .. image:: tall.png + :width: 1000 + +.. wide image which does not fit in linewidth even after height diminished +.. 
image:: sphinx.png + :height: 1000 + diff --git a/tests/roots/test-latex-includegraphics/sphinx.png b/tests/roots/test-latex-includegraphics/sphinx.png Binary files differnew file mode 100644 index 0000000..0a103cd --- /dev/null +++ b/tests/roots/test-latex-includegraphics/sphinx.png diff --git a/tests/roots/test-latex-includegraphics/tall.png b/tests/roots/test-latex-includegraphics/tall.png Binary files differnew file mode 100644 index 0000000..c98c058 --- /dev/null +++ b/tests/roots/test-latex-includegraphics/tall.png diff --git a/tests/roots/test-latex-index/conf.py b/tests/roots/test-latex-index/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-latex-index/conf.py diff --git a/tests/roots/test-latex-index/index.rst b/tests/roots/test-latex-index/index.rst new file mode 100644 index 0000000..5b61279 --- /dev/null +++ b/tests/roots/test-latex-index/index.rst @@ -0,0 +1,16 @@ +test-latex-index +================ + +A :index:`famous` :index:`equation`: + +.. math:: + + E = m c^2 + +.. index:: Einstein, relativity + +and some text. + +.. index:: main { + +An index entry containing non paired curly brace diff --git a/tests/roots/test-latex-labels-before-module/automodule1.py b/tests/roots/test-latex-labels-before-module/automodule1.py new file mode 100644 index 0000000..0545aa4 --- /dev/null +++ b/tests/roots/test-latex-labels-before-module/automodule1.py @@ -0,0 +1,2 @@ +"""docstring""" + diff --git a/tests/roots/test-latex-labels-before-module/automodule2a.py b/tests/roots/test-latex-labels-before-module/automodule2a.py new file mode 100644 index 0000000..0545aa4 --- /dev/null +++ b/tests/roots/test-latex-labels-before-module/automodule2a.py @@ -0,0 +1,2 @@ +"""docstring""" + diff --git a/tests/roots/test-latex-labels-before-module/automodule2b.py b/tests/roots/test-latex-labels-before-module/automodule2b.py new file mode 100644 index 0000000..0545aa4 --- /dev/null +++ b/tests/roots/test-latex-labels-before-module/automodule2b.py @@ -0,0 +1,2 @@ +"""docstring""" + diff --git a/tests/roots/test-latex-labels-before-module/automodule3.py b/tests/roots/test-latex-labels-before-module/automodule3.py new file mode 100644 index 0000000..0545aa4 --- /dev/null +++ b/tests/roots/test-latex-labels-before-module/automodule3.py @@ -0,0 +1,2 @@ +"""docstring""" + diff --git a/tests/roots/test-latex-labels-before-module/conf.py b/tests/roots/test-latex-labels-before-module/conf.py new file mode 100644 index 0000000..25193b0 --- /dev/null +++ b/tests/roots/test-latex-labels-before-module/conf.py @@ -0,0 +1,8 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + +extensions = ['sphinx.ext.autodoc'] + +nitpicky = True diff --git a/tests/roots/test-latex-labels-before-module/index.rst b/tests/roots/test-latex-labels-before-module/index.rst new file mode 100644 index 0000000..e6df749 --- /dev/null +++ b/tests/roots/test-latex-labels-before-module/index.rst @@ -0,0 +1,48 @@ +latex-labels-before-module +========================== + +.. _label_1a: +.. _label_1b: + +.. module:: module1 + + text + +.. _label_2: + +.. module:: module2a + + text + +.. module:: module2b + + text + +.. _label_3: + +.. module:: module3 + + text + +.. _label_auto_1a: +.. _label_auto_1b: + +.. automodule:: automodule1 + + text + +.. _label_auto_2: + +.. automodule:: automodule2a + + text + +.. automodule:: automodule2b + + text + +.. _label_auto_3: + +.. 
automodule:: automodule3 + + text diff --git a/tests/roots/test-latex-labels/conf.py b/tests/roots/test-latex-labels/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-latex-labels/conf.py diff --git a/tests/roots/test-latex-labels/index.rst b/tests/roots/test-latex-labels/index.rst new file mode 100644 index 0000000..f3c4217 --- /dev/null +++ b/tests/roots/test-latex-labels/index.rst @@ -0,0 +1,72 @@ +latex-labels +============ + +figures +------- + +.. _figure1: +.. _figure2: + +.. figure:: logo.jpg + + labeled figure + +.. figure:: logo.jpg + :name: figure3 + + labeled figure + + with a legend + +code-blocks +----------- + +.. _codeblock1: +.. _codeblock2: + +.. code-block:: none + + blah blah blah + +.. code-block:: none + :name: codeblock3 + + blah blah blah + +tables +------ + +.. _table1: +.. _table2: + +.. table:: table caption + + ==== ==== + head head + cell cell + ==== ==== + +.. table:: table caption + :name: table3 + + ==== ==== + head head + cell cell + ==== ==== + +.. _section1: +.. _section2: + +subsection +---------- + +.. _section3: + +subsubsection +~~~~~~~~~~~~~ + +.. toctree:: + + otherdoc + +* Embedded standalone hyperlink reference(refs: #5948): `subsection <section1_>`_. diff --git a/tests/roots/test-latex-labels/otherdoc.rst b/tests/roots/test-latex-labels/otherdoc.rst new file mode 100644 index 0000000..55c5ca0 --- /dev/null +++ b/tests/roots/test-latex-labels/otherdoc.rst @@ -0,0 +1,2 @@ +otherdoc +======== diff --git a/tests/roots/test-latex-numfig/conf.py b/tests/roots/test-latex-numfig/conf.py new file mode 100644 index 0000000..287bd1c --- /dev/null +++ b/tests/roots/test-latex-numfig/conf.py @@ -0,0 +1,8 @@ +extensions = ['sphinx.ext.imgmath'] # for math_numfig + +latex_documents = [ + ('indexmanual', 'SphinxManual.tex', 'Test numfig manual', + 'Sphinx', 'manual'), + ('indexhowto', 'SphinxHowTo.tex', 'Test numfig howto', + 'Sphinx', 'howto'), +] diff --git a/tests/roots/test-latex-numfig/index.rst b/tests/roots/test-latex-numfig/index.rst new file mode 100644 index 0000000..6b8b968 --- /dev/null +++ b/tests/roots/test-latex-numfig/index.rst @@ -0,0 +1,9 @@ +================= +test-latex-numfig +================= + +.. 
toctree:: + :numbered: + + indexmanual + indexhowto diff --git a/tests/roots/test-latex-numfig/indexhowto.rst b/tests/roots/test-latex-numfig/indexhowto.rst new file mode 100644 index 0000000..4749f1e --- /dev/null +++ b/tests/roots/test-latex-numfig/indexhowto.rst @@ -0,0 +1,10 @@ +======================= +test-latex-numfig-howto +======================= + +This is a part +============== + +This is a section +----------------- + diff --git a/tests/roots/test-latex-numfig/indexmanual.rst b/tests/roots/test-latex-numfig/indexmanual.rst new file mode 100644 index 0000000..8bab4fb --- /dev/null +++ b/tests/roots/test-latex-numfig/indexmanual.rst @@ -0,0 +1,13 @@ +======================== +test-latex-numfig-manual +======================== + +First part +========== + +This is chapter +--------------- + +This is section +~~~~~~~~~~~~~~~ + diff --git a/tests/roots/test-latex-table/_mytemplates/latex/longtable.tex_t b/tests/roots/test-latex-table/_mytemplates/latex/longtable.tex_t new file mode 100644 index 0000000..e2cb1db --- /dev/null +++ b/tests/roots/test-latex-table/_mytemplates/latex/longtable.tex_t @@ -0,0 +1 @@ +SALUT LES COPAINS diff --git a/tests/roots/test-latex-table/complex.rst b/tests/roots/test-latex-table/complex.rst new file mode 100644 index 0000000..d648ff1 --- /dev/null +++ b/tests/roots/test-latex-table/complex.rst @@ -0,0 +1,58 @@ +complex tables +============== + +grid table +---------- + +.. rst-class:: nocolorrows + ++---------+---------+---------+ +| header1 | header2 | header3 | ++=========+=========+=========+ +| cell1-1 | cell1-2 | cell1-3 | ++---------+ +---------+ +| cell2-1 | | cell2-3 | ++ +---------+---------+ +| | cell3-2-par1 | ++---------+ | +| cell4-1 | cell3-2-par2 | ++---------+---------+---------+ +| cell5-1 | ++---------+---------+---------+ + +grid table with tabularcolumns having no vline +---------------------------------------------- + +.. tabularcolumns:: TTT + ++---------+---------+---------+ +| header1 | header2 | header3 | ++=========+=========+=========+ +| cell1-1 | cell1-2 | cell1-3 | ++---------+ +---------+ +| cell2-1 | | cell2-3 | ++ +---------+---------+ +| | cell3-2-par1 | ++---------+ | +| cell4-1 | cell3-2-par2 | ++---------+---------+---------+ +| cell5-1 | ++---------+---------+---------+ + +complex spanning cell +--------------------- + +table having ... + +* consecutive multirow at top of row (1-1 and 1-2) +* consecutive multirow at end of row (1-4 and 1-5) + +.. 
rst-class:: standard + ++-----------+-----------+-----------+-----------+-----------+ +| | | cell1-3 | | | +| | +-----------+ | cell1-5 | +| cell1-1 | cell1-2 | | cell1-4 | | +| | | cell2-3 | +-----------+ +| | | | | cell3-5 | ++-----------+-----------+-----------+-----------+-----------+ diff --git a/tests/roots/test-latex-table/conf.py b/tests/roots/test-latex-table/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-latex-table/conf.py diff --git a/tests/roots/test-latex-table/expects/complex_spanning_cell.tex b/tests/roots/test-latex-table/expects/complex_spanning_cell.tex new file mode 100644 index 0000000..d2d6189 --- /dev/null +++ b/tests/roots/test-latex-table/expects/complex_spanning_cell.tex @@ -0,0 +1,69 @@ +\label{\detokenize{complex:complex-spanning-cell}} +\sphinxAtStartPar +table having … +\begin{itemize} +\item {} +\sphinxAtStartPar +consecutive multirow at top of row (1\sphinxhyphen{}1 and 1\sphinxhyphen{}2) + +\item {} +\sphinxAtStartPar +consecutive multirow at end of row (1\sphinxhyphen{}4 and 1\sphinxhyphen{}5) + +\end{itemize} + + +\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\sphinxthistablewithstandardstyle +\centering +\begin{tabulary}{\linewidth}[t]{|T|T|T|T|T|} +\sphinxtoprule +\sphinxtableatstartofbodyhook\sphinxmultirow{3}{1}{% +\begin{varwidth}[t]{\sphinxcolwidth{1}{5}} +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +\par +\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}% +}% +&\sphinxmultirow{3}{2}{% +\begin{varwidth}[t]{\sphinxcolwidth{1}{5}} +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\par +\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}% +}% +& +\sphinxAtStartPar +cell1\sphinxhyphen{}3 +&\sphinxmultirow{3}{4}{% +\begin{varwidth}[t]{\sphinxcolwidth{1}{5}} +\sphinxAtStartPar +cell1\sphinxhyphen{}4 +\par +\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}% +}% +&\sphinxmultirow{2}{5}{% +\begin{varwidth}[t]{\sphinxcolwidth{1}{5}} +\sphinxAtStartPar +cell1\sphinxhyphen{}5 +\par +\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}% +}% +\\ +\sphinxvlinecrossing{1}\sphinxcline{3-3}\sphinxvlinecrossing{4}\sphinxfixclines{5}\sphinxtablestrut{1}&\sphinxtablestrut{2}&\sphinxmultirow{2}{6}{% +\begin{varwidth}[t]{\sphinxcolwidth{1}{5}} +\sphinxAtStartPar +cell2\sphinxhyphen{}3 +\par +\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}% +}% +&\sphinxtablestrut{4}&\sphinxtablestrut{5}\\ +\sphinxvlinecrossing{1}\sphinxvlinecrossing{2}\sphinxvlinecrossing{3}\sphinxcline{5-5}\sphinxfixclines{5}\sphinxtablestrut{1}&\sphinxtablestrut{2}&\sphinxtablestrut{6}&\sphinxtablestrut{4}& +\sphinxAtStartPar +cell3\sphinxhyphen{}5 +\\ +\sphinxbottomrule +\end{tabulary} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/gridtable.tex b/tests/roots/test-latex-table/expects/gridtable.tex new file mode 100644 index 0000000..407abe7 --- /dev/null +++ b/tests/roots/test-latex-table/expects/gridtable.tex @@ -0,0 +1,73 @@ +\label{\detokenize{complex:grid-table}} + +\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\sphinxthistablewithnocolorrowsstyle +\centering +\begin{tabulary}{\linewidth}[t]{|T|T|T|} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header3 +\\ +\sphinxmidrule +\sphinxtableatstartofbodyhook +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +&\sphinxmultirow{2}{5}{% 
+\begin{varwidth}[t]{\sphinxcolwidth{1}{3}} +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\par +\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}% +}% +& +\sphinxAtStartPar +cell1\sphinxhyphen{}3 +\\ +\sphinxcline{1-1}\sphinxcline{3-3}\sphinxfixclines{3}\sphinxmultirow{2}{7}{% +\begin{varwidth}[t]{\sphinxcolwidth{1}{3}} +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +\par +\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}% +}% +&\sphinxtablestrut{5}& +\sphinxAtStartPar +cell2\sphinxhyphen{}3 +\\ +\sphinxcline{2-3}\sphinxfixclines{3}\sphinxtablestrut{7}&\sphinxstartmulticolumn{2}% +\sphinxmultirow{2}{9}{% +\begin{varwidth}[t]{\sphinxcolwidth{2}{3}} +\sphinxAtStartPar +cell3\sphinxhyphen{}2\sphinxhyphen{}par1 + +\sphinxAtStartPar +cell3\sphinxhyphen{}2\sphinxhyphen{}par2 +\par +\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}% +}% +\sphinxstopmulticolumn +\\ +\sphinxcline{1-1}\sphinxfixclines{3} +\sphinxAtStartPar +cell4\sphinxhyphen{}1 +&\multicolumn{2}{l|}{\sphinxtablestrut{9}}\\ +\sphinxhline\sphinxstartmulticolumn{3}% +\begin{varwidth}[t]{\sphinxcolwidth{3}{3}} +\sphinxAtStartPar +cell5\sphinxhyphen{}1 +\par +\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}% +\sphinxstopmulticolumn +\\ +\sphinxbottomrule +\end{tabulary} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/gridtable_with_tabularcolumn.tex b/tests/roots/test-latex-table/expects/gridtable_with_tabularcolumn.tex new file mode 100644 index 0000000..c77b990 --- /dev/null +++ b/tests/roots/test-latex-table/expects/gridtable_with_tabularcolumn.tex @@ -0,0 +1,73 @@ +\label{\detokenize{complex:grid-table-with-tabularcolumns-having-no-vline}} + +\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\sphinxthistablewithnovlinesstyle +\centering +\begin{tabulary}{\linewidth}[t]{TTT} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header3 +\\ +\sphinxmidrule +\sphinxtableatstartofbodyhook +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +&\sphinxmultirow{2}{5}{% +\begin{varwidth}[t]{\sphinxcolwidth{1}{3}} +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\par +\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}% +}% +& +\sphinxAtStartPar +cell1\sphinxhyphen{}3 +\\ +\sphinxcline{1-1}\sphinxcline{3-3}\sphinxfixclines{3}\sphinxmultirow{2}{7}{% +\begin{varwidth}[t]{\sphinxcolwidth{1}{3}} +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +\par +\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}% +}% +&\sphinxtablestrut{5}& +\sphinxAtStartPar +cell2\sphinxhyphen{}3 +\\ +\sphinxcline{2-3}\sphinxfixclines{3}\sphinxtablestrut{7}&\sphinxstartmulticolumn{2}% +\sphinxmultirow{2}{9}{% +\begin{varwidth}[t]{\sphinxcolwidth{2}{3}} +\sphinxAtStartPar +cell3\sphinxhyphen{}2\sphinxhyphen{}par1 + +\sphinxAtStartPar +cell3\sphinxhyphen{}2\sphinxhyphen{}par2 +\par +\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}% +}% +\sphinxstopmulticolumn +\\ +\sphinxcline{1-1}\sphinxfixclines{3} +\sphinxAtStartPar +cell4\sphinxhyphen{}1 +&\multicolumn{2}{l}{\sphinxtablestrut{9}}\\ +\sphinxhline\sphinxstartmulticolumn{3}% +\begin{varwidth}[t]{\sphinxcolwidth{3}{3}} +\sphinxAtStartPar +cell5\sphinxhyphen{}1 +\par +\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}% +\sphinxstopmulticolumn +\\ +\sphinxbottomrule +\end{tabulary} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/longtable.tex 
b/tests/roots/test-latex-table/expects/longtable.tex new file mode 100644 index 0000000..1fe1022 --- /dev/null +++ b/tests/roots/test-latex-table/expects/longtable.tex @@ -0,0 +1,70 @@ +\label{\detokenize{longtable:longtable}} + +\begin{savenotes} +\sphinxatlongtablestart +\sphinxthistablewithglobalstyle +\sphinxthistablewithborderlessstyle +\makeatletter + \LTleft \@totalleftmargin plus1fill + \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill +\makeatother +\begin{longtable}{ll} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endfirsthead + +\multicolumn{2}{c}{\sphinxnorowcolor + \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}% +}\\ +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endhead + +\sphinxbottomrule +\multicolumn{2}{r}{\sphinxnorowcolor + \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}% +}\\ +\endfoot + +\endlastfoot +\sphinxtableatstartofbodyhook + +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{longtable} +\sphinxtableafterendhook +\sphinxatlongtableend +\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/longtable_having_align.tex b/tests/roots/test-latex-table/expects/longtable_having_align.tex new file mode 100644 index 0000000..4a4df18 --- /dev/null +++ b/tests/roots/test-latex-table/expects/longtable_having_align.tex @@ -0,0 +1,69 @@ +\label{\detokenize{longtable:longtable-having-align-option}} + +\begin{savenotes} +\sphinxatlongtablestart +\sphinxthistablewithglobalstyle +\makeatletter + \LTleft \@totalleftmargin plus1fill + \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax +\makeatother +\begin{longtable}{|l|l|} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endfirsthead + +\multicolumn{2}{c}{\sphinxnorowcolor + \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}% +}\\ +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endhead + +\sphinxbottomrule +\multicolumn{2}{r}{\sphinxnorowcolor + \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}% +}\\ +\endfoot + +\endlastfoot +\sphinxtableatstartofbodyhook + +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{longtable} +\sphinxtableafterendhook +\sphinxatlongtableend +\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/longtable_having_caption.tex b/tests/roots/test-latex-table/expects/longtable_having_caption.tex new file mode 100644 index 0000000..a1aa65d --- /dev/null +++ b/tests/roots/test-latex-table/expects/longtable_having_caption.tex @@ -0,0 +1,71 @@ 
+\label{\detokenize{longtable:longtable-having-caption}} + +\begin{savenotes} +\sphinxatlongtablestart +\sphinxthistablewithglobalstyle +\makeatletter + \LTleft \@totalleftmargin plus1fill + \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill +\makeatother +\begin{longtable}{|l|l|} +\sphinxthelongtablecaptionisattop +\caption{caption for longtable\strut}\label{\detokenize{longtable:id1}}\\*[\sphinxlongtablecapskipadjust] +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endfirsthead + +\multicolumn{2}{c}{\sphinxnorowcolor + \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}% +}\\ +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endhead + +\sphinxbottomrule +\multicolumn{2}{r}{\sphinxnorowcolor + \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}% +}\\ +\endfoot + +\endlastfoot +\sphinxtableatstartofbodyhook + +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{longtable} +\sphinxtableafterendhook +\sphinxatlongtableend +\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/longtable_having_problematic_cell.tex b/tests/roots/test-latex-table/expects/longtable_having_problematic_cell.tex new file mode 100644 index 0000000..240a760 --- /dev/null +++ b/tests/roots/test-latex-table/expects/longtable_having_problematic_cell.tex @@ -0,0 +1,76 @@ +\label{\detokenize{longtable:longtable-having-problematic-cell}} + +\begin{savenotes} +\sphinxatlongtablestart +\sphinxthistablewithglobalstyle +\makeatletter + \LTleft \@totalleftmargin plus1fill + \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill +\makeatother +\begin{longtable}{|*{2}{\X{1}{2}|}} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endfirsthead + +\multicolumn{2}{c}{\sphinxnorowcolor + \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}% +}\\ +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endhead + +\sphinxbottomrule +\multicolumn{2}{r}{\sphinxnorowcolor + \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}% +}\\ +\endfoot + +\endlastfoot +\sphinxtableatstartofbodyhook +\begin{itemize} +\item {} +\sphinxAtStartPar +item1 + +\item {} +\sphinxAtStartPar +item2 + +\end{itemize} +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{longtable} +\sphinxtableafterendhook +\sphinxatlongtableend +\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_problematic_cell.tex b/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_problematic_cell.tex new file mode 100644 index 0000000..897830b --- /dev/null +++ 
b/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_problematic_cell.tex @@ -0,0 +1,81 @@ +\label{\detokenize{longtable:longtable-having-both-stub-columns-and-problematic-cell}} + +\begin{savenotes} +\sphinxatlongtablestart +\sphinxthistablewithglobalstyle +\makeatletter + \LTleft \@totalleftmargin plus1fill + \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill +\makeatother +\begin{longtable}{|*{3}{\X{1}{3}|}} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header3 +\\ +\sphinxmidrule +\endfirsthead + +\multicolumn{3}{c}{\sphinxnorowcolor + \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}% +}\\ +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header3 +\\ +\sphinxmidrule +\endhead + +\sphinxbottomrule +\multicolumn{3}{r}{\sphinxnorowcolor + \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}% +}\\ +\endfoot + +\endlastfoot +\sphinxtableatstartofbodyhook +\sphinxstyletheadfamily \begin{itemize} +\item {} +\sphinxAtStartPar +instub1\sphinxhyphen{}1a + +\item {} +\sphinxAtStartPar +instub1\sphinxhyphen{}1b + +\end{itemize} +&\sphinxstyletheadfamily +\sphinxAtStartPar +instub1\sphinxhyphen{}2 +& +\sphinxAtStartPar +notinstub1\sphinxhyphen{}3 +\\ +\sphinxhline\sphinxstyletheadfamily +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}3 +\\ +\sphinxbottomrule +\end{longtable} +\sphinxtableafterendhook +\sphinxatlongtableend +\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex b/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex new file mode 100644 index 0000000..b9f7512 --- /dev/null +++ b/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex @@ -0,0 +1,70 @@ +\label{\detokenize{longtable:longtable-having-verbatim}} + +\begin{savenotes} +\sphinxatlongtablestart +\sphinxthistablewithglobalstyle +\makeatletter + \LTleft \@totalleftmargin plus1fill + \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill +\makeatother +\begin{longtable}{|*{2}{\X{1}{2}|}} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endfirsthead + +\multicolumn{2}{c}{\sphinxnorowcolor + \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}% +}\\ +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endhead + +\sphinxbottomrule +\multicolumn{2}{r}{\sphinxnorowcolor + \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}% +}\\ +\endfoot + +\endlastfoot +\sphinxtableatstartofbodyhook + +\begin{sphinxVerbatimintable}[commandchars=\\\{\}] +\PYG{n}{hello} \PYG{n}{world} +\end{sphinxVerbatimintable} +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{longtable} +\sphinxtableafterendhook +\sphinxatlongtableend +\end{savenotes} 
diff --git a/tests/roots/test-latex-table/expects/longtable_having_widths.tex b/tests/roots/test-latex-table/expects/longtable_having_widths.tex new file mode 100644 index 0000000..24dad79 --- /dev/null +++ b/tests/roots/test-latex-table/expects/longtable_having_widths.tex @@ -0,0 +1,73 @@ +\label{\detokenize{longtable:longtable-having-widths-option}} + +\begin{savenotes} +\sphinxatlongtablestart +\sphinxthistablewithglobalstyle +\makeatletter + \LTleft \@totalleftmargin plus1fill + \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill +\makeatother +\begin{longtable}{|\X{30}{100}|\X{70}{100}|} +\noalign{\phantomsection\label{\detokenize{longtable:namedlongtable}}\label{\detokenize{longtable:mylongtable}}}% +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endfirsthead + +\multicolumn{2}{c}{\sphinxnorowcolor + \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}% +}\\ +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endhead + +\sphinxbottomrule +\multicolumn{2}{r}{\sphinxnorowcolor + \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}% +}\\ +\endfoot + +\endlastfoot +\sphinxtableatstartofbodyhook + +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{longtable} +\sphinxtableafterendhook +\sphinxatlongtableend +\end{savenotes} + +\sphinxAtStartPar +See {\hyperref[\detokenize{longtable:mylongtable}]{\sphinxcrossref{mylongtable}}}, same as {\hyperref[\detokenize{longtable:namedlongtable}]{\sphinxcrossref{\DUrole{std,std-ref}{this one}}}}. 
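The expects/*.tex files in this root are reference fragments for the LaTeX builder tests: the suite builds the root with the latex builder and compares each fragment against the corresponding piece of the generated .tex. A condensed, substring-based sketch of such a check — the helper name and output file name are assumptions for illustration, not the suite's actual helper:

    from pathlib import Path

    def assert_expected_fragments(outdir: Path, expects_dir: Path, texname: str = 'python.tex') -> None:
        """Illustrative: require every expects/*.tex fragment to appear verbatim in the built LaTeX."""
        result = (outdir / texname).read_text(encoding='utf-8')
        for fragment in sorted(expects_dir.glob('*.tex')):
            expected = fragment.read_text(encoding='utf-8')
            assert expected in result, f'{fragment.name} not found in {texname}'

    # e.g. assert_expected_fragments(Path('_build/latex'),
    #                                Path('tests/roots/test-latex-table/expects'))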
diff --git a/tests/roots/test-latex-table/expects/longtable_having_widths_and_problematic_cell.tex b/tests/roots/test-latex-table/expects/longtable_having_widths_and_problematic_cell.tex new file mode 100644 index 0000000..b4758ca --- /dev/null +++ b/tests/roots/test-latex-table/expects/longtable_having_widths_and_problematic_cell.tex @@ -0,0 +1,76 @@ +\label{\detokenize{longtable:longtable-having-both-widths-and-problematic-cell}} + +\begin{savenotes} +\sphinxatlongtablestart +\sphinxthistablewithglobalstyle +\makeatletter + \LTleft \@totalleftmargin plus1fill + \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill +\makeatother +\begin{longtable}{|\X{30}{100}|\X{70}{100}|} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endfirsthead + +\multicolumn{2}{c}{\sphinxnorowcolor + \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}% +}\\ +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endhead + +\sphinxbottomrule +\multicolumn{2}{r}{\sphinxnorowcolor + \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}% +}\\ +\endfoot + +\endlastfoot +\sphinxtableatstartofbodyhook +\begin{itemize} +\item {} +\sphinxAtStartPar +item1 + +\item {} +\sphinxAtStartPar +item2 + +\end{itemize} +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{longtable} +\sphinxtableafterendhook +\sphinxatlongtableend +\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/longtable_with_tabularcolumn.tex b/tests/roots/test-latex-table/expects/longtable_with_tabularcolumn.tex new file mode 100644 index 0000000..4c380fe --- /dev/null +++ b/tests/roots/test-latex-table/expects/longtable_with_tabularcolumn.tex @@ -0,0 +1,70 @@ +\label{\detokenize{longtable:longtable-with-tabularcolumn}} + +\begin{savenotes} +\sphinxatlongtablestart +\sphinxthistablewithglobalstyle +\sphinxthistablewithvlinesstyle +\makeatletter + \LTleft \@totalleftmargin plus1fill + \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill +\makeatother +\begin{longtable}{|c|c|} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endfirsthead + +\multicolumn{2}{c}{\sphinxnorowcolor + \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}% +}\\ +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\endhead + +\sphinxbottomrule +\multicolumn{2}{r}{\sphinxnorowcolor + \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}% +}\\ +\endfoot + +\endlastfoot +\sphinxtableatstartofbodyhook + +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{longtable} +\sphinxtableafterendhook +\sphinxatlongtableend +\end{savenotes} diff --git 
a/tests/roots/test-latex-table/expects/simple_table.tex b/tests/roots/test-latex-table/expects/simple_table.tex new file mode 100644 index 0000000..7bd85c7 --- /dev/null +++ b/tests/roots/test-latex-table/expects/simple_table.tex @@ -0,0 +1,40 @@ +\label{\detokenize{tabular:simple-table}} + +\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\centering +\begin{tabulary}{\linewidth}[t]{|T|T|} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\sphinxtableatstartofbodyhook +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{tabulary} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/table_having_caption.tex b/tests/roots/test-latex-table/expects/table_having_caption.tex new file mode 100644 index 0000000..f2ce553 --- /dev/null +++ b/tests/roots/test-latex-table/expects/table_having_caption.tex @@ -0,0 +1,44 @@ +\label{\detokenize{tabular:table-having-caption}} + +\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\centering +\sphinxcapstartof{table} +\sphinxthecaptionisattop +\sphinxcaption{caption for table}\label{\detokenize{tabular:id1}} +\sphinxaftertopcaption +\begin{tabulary}{\linewidth}[t]{|T|T|} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\sphinxtableatstartofbodyhook +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{tabulary} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/table_having_problematic_cell.tex b/tests/roots/test-latex-table/expects/table_having_problematic_cell.tex new file mode 100644 index 0000000..7d7ad4b --- /dev/null +++ b/tests/roots/test-latex-table/expects/table_having_problematic_cell.tex @@ -0,0 +1,47 @@ +\label{\detokenize{tabular:table-having-problematic-cell}} + +\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\centering +\begin{tabular}[t]{|*{2}{\X{1}{2}|}} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\sphinxtableatstartofbodyhook\begin{itemize} +\item {} +\sphinxAtStartPar +item1 + +\item {} +\sphinxAtStartPar +item2 + +\end{itemize} +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{tabular} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/table_having_stub_columns_and_problematic_cell.tex b/tests/roots/test-latex-table/expects/table_having_stub_columns_and_problematic_cell.tex new file mode 100644 index 0000000..fbd797a --- /dev/null +++ 
b/tests/roots/test-latex-table/expects/table_having_stub_columns_and_problematic_cell.tex @@ -0,0 +1,49 @@ +\label{\detokenize{tabular:table-having-both-stub-columns-and-problematic-cell}} + +\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\centering +\begin{tabular}[t]{|*{3}{\X{1}{3}|}} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header3 +\\ +\sphinxmidrule +\sphinxtableatstartofbodyhook\sphinxstyletheadfamily \begin{itemize} +\item {} +\sphinxAtStartPar +instub1\sphinxhyphen{}1a + +\item {} +\sphinxAtStartPar +instub1\sphinxhyphen{}1b + +\end{itemize} +&\sphinxstyletheadfamily +\sphinxAtStartPar +instub1\sphinxhyphen{}2 +& +\sphinxAtStartPar +notinstub1\sphinxhyphen{}3 +\\ +\sphinxhline\sphinxstyletheadfamily +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}3 +\\ +\sphinxbottomrule +\end{tabular} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/table_having_threeparagraphs_cell_in_first_col.tex b/tests/roots/test-latex-table/expects/table_having_threeparagraphs_cell_in_first_col.tex new file mode 100644 index 0000000..9acd9a8 --- /dev/null +++ b/tests/roots/test-latex-table/expects/table_having_threeparagraphs_cell_in_first_col.tex @@ -0,0 +1,26 @@ +\label{\detokenize{tabular:table-with-cell-in-first-column-having-three-paragraphs}} + +\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\centering +\begin{tabulary}{\linewidth}[t]{|T|} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +\\ +\sphinxmidrule +\sphinxtableatstartofbodyhook +\sphinxAtStartPar +cell1\sphinxhyphen{}1\sphinxhyphen{}par1 + +\sphinxAtStartPar +cell1\sphinxhyphen{}1\sphinxhyphen{}par2 + +\sphinxAtStartPar +cell1\sphinxhyphen{}1\sphinxhyphen{}par3 +\\ +\sphinxbottomrule +\end{tabulary} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/table_having_verbatim.tex b/tests/roots/test-latex-table/expects/table_having_verbatim.tex new file mode 100644 index 0000000..a002de5 --- /dev/null +++ b/tests/roots/test-latex-table/expects/table_having_verbatim.tex @@ -0,0 +1,41 @@ +\label{\detokenize{tabular:table-having-verbatim}} + +\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\centering +\begin{tabular}[t]{|*{2}{\X{1}{2}|}} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\sphinxtableatstartofbodyhook +\begin{sphinxVerbatimintable}[commandchars=\\\{\}] +\PYG{n}{hello} \PYG{n}{world} +\end{sphinxVerbatimintable} +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{tabular} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/table_having_widths.tex b/tests/roots/test-latex-table/expects/table_having_widths.tex new file mode 100644 index 0000000..fe5f4c4 --- /dev/null +++ b/tests/roots/test-latex-table/expects/table_having_widths.tex @@ -0,0 +1,46 @@ +\label{\detokenize{tabular:table-having-widths-option}} + 
+\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\sphinxthistablewithbooktabsstyle +\sphinxthistablewithcolorrowsstyle +\centering +\phantomsection\label{\detokenize{tabular:namedtabular}}\label{\detokenize{tabular:mytabular}}\nobreak +\begin{tabular}[t]{\X{30}{100}\X{70}{100}} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\sphinxtableatstartofbodyhook +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{tabular} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} + +\sphinxAtStartPar +See {\hyperref[\detokenize{tabular:mytabular}]{\sphinxcrossref{\DUrole{std,std-ref}{this}}}}, same as {\hyperref[\detokenize{tabular:namedtabular}]{\sphinxcrossref{namedtabular}}}. diff --git a/tests/roots/test-latex-table/expects/table_having_widths_and_problematic_cell.tex b/tests/roots/test-latex-table/expects/table_having_widths_and_problematic_cell.tex new file mode 100644 index 0000000..1baf92c --- /dev/null +++ b/tests/roots/test-latex-table/expects/table_having_widths_and_problematic_cell.tex @@ -0,0 +1,47 @@ +\label{\detokenize{tabular:table-having-both-widths-and-problematic-cell}} + +\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\centering +\begin{tabular}[t]{|\X{30}{100}|\X{70}{100}|} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\sphinxtableatstartofbodyhook\begin{itemize} +\item {} +\sphinxAtStartPar +item1 + +\item {} +\sphinxAtStartPar +item2 + +\end{itemize} +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{tabular} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/tabular_having_widths.tex b/tests/roots/test-latex-table/expects/tabular_having_widths.tex new file mode 100644 index 0000000..15321d6 --- /dev/null +++ b/tests/roots/test-latex-table/expects/tabular_having_widths.tex @@ -0,0 +1,40 @@ +\label{\detokenize{tabular:table-having-align-option-tabular}} + +\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\raggedright +\begin{tabular}[t]{|\X{30}{100}|\X{70}{100}|} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\sphinxtableatstartofbodyhook +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{tabular} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/tabularcolumn.tex b/tests/roots/test-latex-table/expects/tabularcolumn.tex new file mode 100644 index 0000000..fcb01be --- /dev/null +++ 
b/tests/roots/test-latex-table/expects/tabularcolumn.tex @@ -0,0 +1,41 @@ +\label{\detokenize{tabular:table-with-tabularcolumn}} + +\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\sphinxthistablewithnovlinesstyle +\centering +\begin{tabulary}{\linewidth}[t]{cc} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\sphinxtableatstartofbodyhook +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{tabulary} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} diff --git a/tests/roots/test-latex-table/expects/tabulary_having_widths.tex b/tests/roots/test-latex-table/expects/tabulary_having_widths.tex new file mode 100644 index 0000000..2463416 --- /dev/null +++ b/tests/roots/test-latex-table/expects/tabulary_having_widths.tex @@ -0,0 +1,40 @@ +\label{\detokenize{tabular:table-having-align-option-tabulary}} + +\begin{savenotes}\sphinxattablestart +\sphinxthistablewithglobalstyle +\raggedleft +\begin{tabulary}{\linewidth}[t]{|T|T|} +\sphinxtoprule +\sphinxstyletheadfamily +\sphinxAtStartPar +header1 +&\sphinxstyletheadfamily +\sphinxAtStartPar +header2 +\\ +\sphinxmidrule +\sphinxtableatstartofbodyhook +\sphinxAtStartPar +cell1\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell1\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell2\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell2\sphinxhyphen{}2 +\\ +\sphinxhline +\sphinxAtStartPar +cell3\sphinxhyphen{}1 +& +\sphinxAtStartPar +cell3\sphinxhyphen{}2 +\\ +\sphinxbottomrule +\end{tabulary} +\sphinxtableafterendhook\par +\sphinxattableend\end{savenotes} diff --git a/tests/roots/test-latex-table/index.rst b/tests/roots/test-latex-table/index.rst new file mode 100644 index 0000000..80dd110 --- /dev/null +++ b/tests/roots/test-latex-table/index.rst @@ -0,0 +1,8 @@ +test-latex-table +================ + +.. toctree:: + + tabular + longtable + complex diff --git a/tests/roots/test-latex-table/longtable.rst b/tests/roots/test-latex-table/longtable.rst new file mode 100644 index 0000000..da6fa5c --- /dev/null +++ b/tests/roots/test-latex-table/longtable.rst @@ -0,0 +1,156 @@ +longtables +========== + +longtable +--------- + +.. table:: + :class: longtable, borderless + + ======= ======= + header1 header2 + ======= ======= + cell1-1 cell1-2 + cell2-1 cell2-2 + cell3-1 cell3-2 + ======= ======= + +longtable having :widths: option +-------------------------------- + +.. _mylongtable: + +.. table:: + :class: longtable + :widths: 30,70 + :name: namedlongtable + + ======= ======= + header1 header2 + ======= ======= + cell1-1 cell1-2 + cell2-1 cell2-2 + cell3-1 cell3-2 + ======= ======= + +See mylongtable_, same as :ref:`this one <namedlongtable>`. + +longtable having :align: option +------------------------------- + +.. table:: + :align: right + :class: longtable + + ======= ======= + header1 header2 + ======= ======= + cell1-1 cell1-2 + cell2-1 cell2-2 + cell3-1 cell3-2 + ======= ======= + +longtable with tabularcolumn +---------------------------- + +.. tabularcolumns:: |c|c| + +.. 
table:: + :class: longtable + + ======= ======= + header1 header2 + ======= ======= + cell1-1 cell1-2 + cell2-1 cell2-2 + cell3-1 cell3-2 + ======= ======= + +longtable having caption +------------------------ + +.. list-table:: caption for longtable + :class: longtable + :header-rows: 1 + + * - header1 + - header2 + * - cell1-1 + - cell1-2 + * - cell2-1 + - cell2-2 + * - cell3-1 + - cell3-2 + +longtable having verbatim +------------------------- + +.. list-table:: + :class: longtable + :header-rows: 1 + + * - header1 + - header2 + * - :: + + hello world + + - cell1-2 + * - cell2-1 + - cell2-2 + * - cell3-1 + - cell3-2 + +longtable having both :widths: and problematic cell +--------------------------------------------------- + +.. list-table:: + :class: longtable + :header-rows: 1 + :widths: 30,70 + + * - header1 + - header2 + * - + item1 + + item2 + - cell1-2 + * - cell2-1 + - cell2-2 + * - cell3-1 + - cell3-2 + +longtable having problematic cell +--------------------------------- + +.. list-table:: + :class: longtable + :header-rows: 1 + + * - header1 + - header2 + * - + item1 + + item2 + - cell1-2 + * - cell2-1 + - cell2-2 + * - cell3-1 + - cell3-2 + +longtable having both stub columns and problematic cell +------------------------------------------------------- + +.. list-table:: + :class: longtable + :header-rows: 1 + :stub-columns: 2 + + * - header1 + - header2 + - header3 + * - + instub1-1a + + instub1-1b + - instub1-2 + - notinstub1-3 + * - cell2-1 + - cell2-2 + - cell2-3 diff --git a/tests/roots/test-latex-table/tabular.rst b/tests/roots/test-latex-table/tabular.rst new file mode 100644 index 0000000..15db823 --- /dev/null +++ b/tests/roots/test-latex-table/tabular.rst @@ -0,0 +1,173 @@ +tabular and tabulary +==================== + +simple table +------------ + +======= ======= +header1 header2 +======= ======= +cell1-1 cell1-2 +cell2-1 cell2-2 +cell3-1 cell3-2 +======= ======= + +table having :widths: option +---------------------------- + +.. _mytabular: + +.. table:: + :widths: 30,70 + :name: namedtabular + :class: booktabs, colorrows + + ======= ======= + header1 header2 + ======= ======= + cell1-1 cell1-2 + cell2-1 cell2-2 + cell3-1 cell3-2 + ======= ======= + +See :ref:`this <mytabular>`, same as namedtabular_. + +table having :align: option (tabulary) +-------------------------------------- + +.. table:: + :align: right + + ======= ======= + header1 header2 + ======= ======= + cell1-1 cell1-2 + cell2-1 cell2-2 + cell3-1 cell3-2 + ======= ======= + +table having :align: option (tabular) +------------------------------------- + +.. table:: + :align: left + :widths: 30,70 + + ======= ======= + header1 header2 + ======= ======= + cell1-1 cell1-2 + cell2-1 cell2-2 + cell3-1 cell3-2 + ======= ======= + +table with tabularcolumn +------------------------ + +.. tabularcolumns:: cc + +======= ======= +header1 header2 +======= ======= +cell1-1 cell1-2 +cell2-1 cell2-2 +cell3-1 cell3-2 +======= ======= + +table with cell in first column having three paragraphs +------------------------------------------------------- + ++--------------+ +| header1 | ++==============+ +| cell1-1-par1 | +| | +| cell1-1-par2 | +| | +| cell1-1-par3 | ++--------------+ + + +table having caption +-------------------- + +.. list-table:: caption for table + :header-rows: 1 + + * - header1 + - header2 + * - cell1-1 + - cell1-2 + * - cell2-1 + - cell2-2 + * - cell3-1 + - cell3-2 + +table having verbatim +--------------------- + +.. 
list-table:: + :header-rows: 1 + + * - header1 + - header2 + * - :: + + hello world + + - cell1-2 + * - cell2-1 + - cell2-2 + * - cell3-1 + - cell3-2 + +table having both :widths: and problematic cell +----------------------------------------------- + +.. list-table:: + :header-rows: 1 + :widths: 30,70 + + * - header1 + - header2 + * - + item1 + + item2 + - cell1-2 + * - cell2-1 + - cell2-2 + * - cell3-1 + - cell3-2 + +table having problematic cell +----------------------------- + +.. list-table:: + :header-rows: 1 + + * - header1 + - header2 + * - + item1 + + item2 + - cell1-2 + * - cell2-1 + - cell2-2 + * - cell3-1 + - cell3-2 + +table having both stub columns and problematic cell +--------------------------------------------------- + +.. list-table:: + :header-rows: 1 + :stub-columns: 2 + + * - header1 + - header2 + - header3 + * - + instub1-1a + + instub1-1b + - instub1-2 + - notinstub1-3 + * - cell2-1 + - cell2-2 + - cell2-3 diff --git a/tests/roots/test-latex-theme/conf.py b/tests/roots/test-latex-theme/conf.py new file mode 100644 index 0000000..196307a --- /dev/null +++ b/tests/roots/test-latex-theme/conf.py @@ -0,0 +1,2 @@ +latex_theme = 'custom' +latex_theme_path = ['theme'] diff --git a/tests/roots/test-latex-theme/index.rst b/tests/roots/test-latex-theme/index.rst new file mode 100644 index 0000000..f5b1d53 --- /dev/null +++ b/tests/roots/test-latex-theme/index.rst @@ -0,0 +1,2 @@ +latex_theme +=========== diff --git a/tests/roots/test-latex-theme/theme/custom/theme.conf b/tests/roots/test-latex-theme/theme/custom/theme.conf new file mode 100644 index 0000000..ad8df26 --- /dev/null +++ b/tests/roots/test-latex-theme/theme/custom/theme.conf @@ -0,0 +1,6 @@ +[theme] +docclass = book +wrapperclass = sphinxbook +papersize = a4paper +pointsize = 12pt +toplevel_sectioning = chapter diff --git a/tests/roots/test-latex-title/conf.py b/tests/roots/test-latex-title/conf.py new file mode 100644 index 0000000..6443316 --- /dev/null +++ b/tests/roots/test-latex-title/conf.py @@ -0,0 +1,4 @@ +# set empty string to the third column to use the first section title to document title +latex_documents = [ + ('index', 'test.tex', '', 'Sphinx', 'report') +] diff --git a/tests/roots/test-latex-title/index.rst b/tests/roots/test-latex-title/index.rst new file mode 100644 index 0000000..411ad00 --- /dev/null +++ b/tests/roots/test-latex-title/index.rst @@ -0,0 +1,12 @@ +.. admonition:: Notice + + This generates nodes.title node before first section title. + +test-latex-title +================ + +.. 
toctree:: + :numbered: + + foo + bar diff --git a/tests/roots/test-latex-unicode/conf.py b/tests/roots/test-latex-unicode/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-latex-unicode/conf.py diff --git a/tests/roots/test-latex-unicode/index.rst b/tests/roots/test-latex-unicode/index.rst new file mode 100644 index 0000000..2abeca9 --- /dev/null +++ b/tests/roots/test-latex-unicode/index.rst @@ -0,0 +1,7 @@ +test-latex-unicode +================== + +* script small e: ℯ +* double struck italic small i: ⅈ +* superscript: ⁰, ¹ +* subscript: ₀, ₁ diff --git a/tests/roots/test-linkcheck-anchors-ignore-for-url/conf.py b/tests/roots/test-linkcheck-anchors-ignore-for-url/conf.py new file mode 100644 index 0000000..0005bfa --- /dev/null +++ b/tests/roots/test-linkcheck-anchors-ignore-for-url/conf.py @@ -0,0 +1,3 @@ +exclude_patterns = ['_build'] +linkcheck_anchors = True +linkcheck_timeout = 0.05 diff --git a/tests/roots/test-linkcheck-anchors-ignore-for-url/index.rst b/tests/roots/test-linkcheck-anchors-ignore-for-url/index.rst new file mode 100644 index 0000000..df287b4 --- /dev/null +++ b/tests/roots/test-linkcheck-anchors-ignore-for-url/index.rst @@ -0,0 +1,7 @@ +* `Example valid url, no anchor <http://localhost:7777/valid>`_ +* `Example valid url, valid anchor <http://localhost:7777/valid#valid-anchor>`_ +* `Example valid url, invalid anchor <http://localhost:7777/valid#invalid-anchor>`_ +* `Example ignored url, no anchor <http://localhost:7777/ignored>`_ +* `Example ignored url, invalid anchor <http://localhost:7777/ignored#invalid-anchor>`_ +* `Example invalid url, no anchor <http://localhost:7777/invalid>`_ +* `Example invalid url, invalid anchor <http://localhost:7777/invalid#anchor>`_ diff --git a/tests/roots/test-linkcheck-anchors-ignore/conf.py b/tests/roots/test-linkcheck-anchors-ignore/conf.py new file mode 100644 index 0000000..0005bfa --- /dev/null +++ b/tests/roots/test-linkcheck-anchors-ignore/conf.py @@ -0,0 +1,3 @@ +exclude_patterns = ['_build'] +linkcheck_anchors = True +linkcheck_timeout = 0.05 diff --git a/tests/roots/test-linkcheck-anchors-ignore/index.rst b/tests/roots/test-linkcheck-anchors-ignore/index.rst new file mode 100644 index 0000000..22a1379 --- /dev/null +++ b/tests/roots/test-linkcheck-anchors-ignore/index.rst @@ -0,0 +1,2 @@ +* `Example Bar invalid <http://localhost:7777/#!bar>`_ +* `Example Bar invalid <http://localhost:7777/#top>`_ diff --git a/tests/roots/test-linkcheck-documents_exclude/br0ken_link.rst b/tests/roots/test-linkcheck-documents_exclude/br0ken_link.rst new file mode 100644 index 0000000..bf421f0 --- /dev/null +++ b/tests/roots/test-linkcheck-documents_exclude/br0ken_link.rst @@ -0,0 +1,5 @@ +Broken link +=========== + +Some links are `broken <https://www.sphinx-doc.org/this-is-another-broken-link>`__ +but sometimes not worrying about some broken links is a valid strategy. diff --git a/tests/roots/test-linkcheck-documents_exclude/broken_link.rst b/tests/roots/test-linkcheck-documents_exclude/broken_link.rst new file mode 100644 index 0000000..86e3bb4 --- /dev/null +++ b/tests/roots/test-linkcheck-documents_exclude/broken_link.rst @@ -0,0 +1,5 @@ +Broken link +=========== + +Some links are `broken <https://www.sphinx-doc.org/this-is-a-broken-link>`__ +but sometimes not worrying about some broken links is a valid strategy. 
diff --git a/tests/roots/test-linkcheck-documents_exclude/conf.py b/tests/roots/test-linkcheck-documents_exclude/conf.py new file mode 100644 index 0000000..52388f9 --- /dev/null +++ b/tests/roots/test-linkcheck-documents_exclude/conf.py @@ -0,0 +1,6 @@ +exclude_patterns = ['_build'] +linkcheck_exclude_documents = [ + '^broken_link$', + 'br[0-9]ken_link', +] +linkcheck_timeout = 0.05 diff --git a/tests/roots/test-linkcheck-documents_exclude/index.rst b/tests/roots/test-linkcheck-documents_exclude/index.rst new file mode 100644 index 0000000..57c39d8 --- /dev/null +++ b/tests/roots/test-linkcheck-documents_exclude/index.rst @@ -0,0 +1,3 @@ +.. toctree:: + broken_link + br0ken_link
\ No newline at end of file diff --git a/tests/roots/test-linkcheck-localserver-anchor/conf.py b/tests/roots/test-linkcheck-localserver-anchor/conf.py new file mode 100644 index 0000000..0005bfa --- /dev/null +++ b/tests/roots/test-linkcheck-localserver-anchor/conf.py @@ -0,0 +1,3 @@ +exclude_patterns = ['_build'] +linkcheck_anchors = True +linkcheck_timeout = 0.05 diff --git a/tests/roots/test-linkcheck-localserver-anchor/index.rst b/tests/roots/test-linkcheck-localserver-anchor/index.rst new file mode 100644 index 0000000..807fe96 --- /dev/null +++ b/tests/roots/test-linkcheck-localserver-anchor/index.rst @@ -0,0 +1 @@ +`local server <http://localhost:7777/#anchor>`_ diff --git a/tests/roots/test-linkcheck-localserver-https/conf.py b/tests/roots/test-linkcheck-localserver-https/conf.py new file mode 100644 index 0000000..a2ce01e --- /dev/null +++ b/tests/roots/test-linkcheck-localserver-https/conf.py @@ -0,0 +1,2 @@ +exclude_patterns = ['_build'] +linkcheck_timeout = 0.05 diff --git a/tests/roots/test-linkcheck-localserver-https/index.rst b/tests/roots/test-linkcheck-localserver-https/index.rst new file mode 100644 index 0000000..fea5983 --- /dev/null +++ b/tests/roots/test-linkcheck-localserver-https/index.rst @@ -0,0 +1 @@ +`HTTPS server <https://localhost:7777/>`_ diff --git a/tests/roots/test-linkcheck-localserver-warn-redirects/conf.py b/tests/roots/test-linkcheck-localserver-warn-redirects/conf.py new file mode 100644 index 0000000..a2ce01e --- /dev/null +++ b/tests/roots/test-linkcheck-localserver-warn-redirects/conf.py @@ -0,0 +1,2 @@ +exclude_patterns = ['_build'] +linkcheck_timeout = 0.05 diff --git a/tests/roots/test-linkcheck-localserver-warn-redirects/index.rst b/tests/roots/test-linkcheck-localserver-warn-redirects/index.rst new file mode 100644 index 0000000..7359bd5 --- /dev/null +++ b/tests/roots/test-linkcheck-localserver-warn-redirects/index.rst @@ -0,0 +1,3 @@ +`local server1 <http://localhost:7777/path1>`_ + +`local server2 <http://localhost:7777/path2>`_ diff --git a/tests/roots/test-linkcheck-localserver/conf.py b/tests/roots/test-linkcheck-localserver/conf.py new file mode 100644 index 0000000..a2ce01e --- /dev/null +++ b/tests/roots/test-linkcheck-localserver/conf.py @@ -0,0 +1,2 @@ +exclude_patterns = ['_build'] +linkcheck_timeout = 0.05 diff --git a/tests/roots/test-linkcheck-localserver/index.rst b/tests/roots/test-linkcheck-localserver/index.rst new file mode 100644 index 0000000..c617e94 --- /dev/null +++ b/tests/roots/test-linkcheck-localserver/index.rst @@ -0,0 +1 @@ +`local server <http://localhost:7777/>`_ diff --git a/tests/roots/test-linkcheck-raw-node/conf.py b/tests/roots/test-linkcheck-raw-node/conf.py new file mode 100644 index 0000000..a2ce01e --- /dev/null +++ b/tests/roots/test-linkcheck-raw-node/conf.py @@ -0,0 +1,2 @@ +exclude_patterns = ['_build'] +linkcheck_timeout = 0.05 diff --git a/tests/roots/test-linkcheck-raw-node/index.rst b/tests/roots/test-linkcheck-raw-node/index.rst new file mode 100644 index 0000000..76e26b5 --- /dev/null +++ b/tests/roots/test-linkcheck-raw-node/index.rst @@ -0,0 +1,2 @@ +.. 
raw:: html + :url: http://localhost:7777/ diff --git a/tests/roots/test-linkcheck-too-many-retries/conf.py b/tests/roots/test-linkcheck-too-many-retries/conf.py new file mode 100644 index 0000000..0005bfa --- /dev/null +++ b/tests/roots/test-linkcheck-too-many-retries/conf.py @@ -0,0 +1,3 @@ +exclude_patterns = ['_build'] +linkcheck_anchors = True +linkcheck_timeout = 0.05 diff --git a/tests/roots/test-linkcheck-too-many-retries/index.rst b/tests/roots/test-linkcheck-too-many-retries/index.rst new file mode 100644 index 0000000..29b1ae4 --- /dev/null +++ b/tests/roots/test-linkcheck-too-many-retries/index.rst @@ -0,0 +1 @@ +`Non-existing uri with localhost <https://localhost:7777/doesnotexist>`_ diff --git a/tests/roots/test-linkcheck/conf.py b/tests/roots/test-linkcheck/conf.py new file mode 100644 index 0000000..6ddb41a --- /dev/null +++ b/tests/roots/test-linkcheck/conf.py @@ -0,0 +1,4 @@ +root_doc = 'links' +exclude_patterns = ['_build'] +linkcheck_anchors = True +linkcheck_timeout = 0.05 diff --git a/tests/roots/test-linkcheck/links.rst b/tests/roots/test-linkcheck/links.rst new file mode 100644 index 0000000..88c757e --- /dev/null +++ b/tests/roots/test-linkcheck/links.rst @@ -0,0 +1,14 @@ +Some additional anchors to exercise ignore code + +* `Valid url <http://localhost:7777/>`_ +* `Bar anchor invalid (trailing slash) <http://localhost:7777/#!bar>`_ +* `Bar anchor invalid <http://localhost:7777#!bar>`_ tests that default ignore anchor of #! does not need to be prefixed with / +* `Top anchor invalid <http://localhost:7777/#top>`_ +* `'does-not-exist' anchor invalid <http://localhost:7777#does-not-exist>`_ +* `Valid local file <conf.py>`_ +* `Invalid local file <path/to/notfound>`_ + +.. image:: http://localhost:7777/image.png +.. figure:: http://localhost:7777/image2.png + +* `Valid anchored url <http://localhost:7777/anchor.html#found>`_ diff --git a/tests/roots/test-local-logo/conf.py b/tests/roots/test-local-logo/conf.py new file mode 100644 index 0000000..1a166c1 --- /dev/null +++ b/tests/roots/test-local-logo/conf.py @@ -0,0 +1,4 @@ +latex_documents = [ + ('index', 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report') +] +html_logo = "images/img.png" diff --git a/tests/roots/test-local-logo/images/img.png b/tests/roots/test-local-logo/images/img.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-local-logo/images/img.png diff --git a/tests/roots/test-local-logo/index.rst b/tests/roots/test-local-logo/index.rst new file mode 100644 index 0000000..af12ed6 --- /dev/null +++ b/tests/roots/test-local-logo/index.rst @@ -0,0 +1,31 @@ +The basic Sphinx documentation for testing +========================================== + +Sphinx is a tool that makes it easy to create intelligent and beautiful +documentation for Python projects (or other documents consisting of multiple +reStructuredText sources), written by Georg Brandl. It was originally created +for the new Python documentation, and has excellent facilities for Python +project documentation, but C/C++ is supported as well, and more languages are +planned. + +Sphinx uses reStructuredText as its markup language, and many of its strengths +come from the power and straightforwardness of reStructuredText and its parsing +and translating suite, the Docutils. 
+ +features +-------- + +Among its features are the following: + +* Output formats: HTML (including derivative formats such as HTML Help, Epub + and Qt Help), plain text, manual pages and LaTeX or direct PDF output + using rst2pdf +* Extensive cross-references: semantic markup and automatic links + for functions, classes, glossary terms and similar pieces of information +* Hierarchical structure: easy definition of a document tree, with automatic + links to siblings, parents and children +* Automatic indices: general index as well as a module index +* Code handling: automatic highlighting using the Pygments highlighter +* Flexible HTML output using the Jinja 2 templating engine +* Various extensions are available, e.g. for automatic testing of snippets + and inclusion of appropriately formatted docstrings diff --git a/tests/roots/test-locale/locale1/en/LC_MESSAGES/myext.mo b/tests/roots/test-locale/locale1/en/LC_MESSAGES/myext.mo Binary files differnew file mode 100644 index 0000000..6aa00f7 --- /dev/null +++ b/tests/roots/test-locale/locale1/en/LC_MESSAGES/myext.mo diff --git a/tests/roots/test-locale/locale1/en/LC_MESSAGES/myext.po b/tests/roots/test-locale/locale1/en/LC_MESSAGES/myext.po new file mode 100644 index 0000000..ee1f6c2 --- /dev/null +++ b/tests/roots/test-locale/locale1/en/LC_MESSAGES/myext.po @@ -0,0 +1,2 @@ +msgid "Hello world" +msgstr "HELLO WORLD" diff --git a/tests/roots/test-locale/locale1/et/LC_MESSAGES/myext.mo b/tests/roots/test-locale/locale1/et/LC_MESSAGES/myext.mo Binary files differnew file mode 100644 index 0000000..c99a368 --- /dev/null +++ b/tests/roots/test-locale/locale1/et/LC_MESSAGES/myext.mo diff --git a/tests/roots/test-locale/locale1/et/LC_MESSAGES/myext.po b/tests/roots/test-locale/locale1/et/LC_MESSAGES/myext.po new file mode 100644 index 0000000..1ecf6e3 --- /dev/null +++ b/tests/roots/test-locale/locale1/et/LC_MESSAGES/myext.po @@ -0,0 +1,2 @@ +msgid "Hello world" +msgstr "Tere maailm" diff --git a/tests/roots/test-locale/locale2/en/LC_MESSAGES/myext.mo b/tests/roots/test-locale/locale2/en/LC_MESSAGES/myext.mo Binary files differnew file mode 100644 index 0000000..14c34d0 --- /dev/null +++ b/tests/roots/test-locale/locale2/en/LC_MESSAGES/myext.mo diff --git a/tests/roots/test-locale/locale2/en/LC_MESSAGES/myext.po b/tests/roots/test-locale/locale2/en/LC_MESSAGES/myext.po new file mode 100644 index 0000000..d376cf9 --- /dev/null +++ b/tests/roots/test-locale/locale2/en/LC_MESSAGES/myext.po @@ -0,0 +1,2 @@ +msgid "Hello sphinx" +msgstr "HELLO SPHINX" diff --git a/tests/roots/test-manpage_url/conf.py b/tests/roots/test-manpage_url/conf.py new file mode 100644 index 0000000..a45d22e --- /dev/null +++ b/tests/roots/test-manpage_url/conf.py @@ -0,0 +1 @@ +exclude_patterns = ['_build'] diff --git a/tests/roots/test-manpage_url/index.rst b/tests/roots/test-manpage_url/index.rst new file mode 100644 index 0000000..50d3b04 --- /dev/null +++ b/tests/roots/test-manpage_url/index.rst @@ -0,0 +1,3 @@ + * :manpage:`man(1)` + * :manpage:`ls.1` + * :manpage:`sphinx` diff --git a/tests/roots/test-markup-citation/conf.py b/tests/roots/test-markup-citation/conf.py new file mode 100644 index 0000000..e274bde --- /dev/null +++ b/tests/roots/test-markup-citation/conf.py @@ -0,0 +1,3 @@ +latex_documents = [ + ('index', 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report') +] diff --git a/tests/roots/test-markup-citation/index.rst b/tests/roots/test-markup-citation/index.rst new file mode 100644 index 0000000..238f093 --- /dev/null +++ 
b/tests/roots/test-markup-citation/index.rst @@ -0,0 +1,9 @@ +test-markup-citation +===================== + +This is a citation ref; [CITE1]_ and [CITE2]_. + +.. [CITE1] This is a citation + +.. [CITE2] This is + a multiline citation diff --git a/tests/roots/test-markup-rubric/conf.py b/tests/roots/test-markup-rubric/conf.py new file mode 100644 index 0000000..e274bde --- /dev/null +++ b/tests/roots/test-markup-rubric/conf.py @@ -0,0 +1,3 @@ +latex_documents = [ + ('index', 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report') +] diff --git a/tests/roots/test-markup-rubric/index.rst b/tests/roots/test-markup-rubric/index.rst new file mode 100644 index 0000000..c2ae68a --- /dev/null +++ b/tests/roots/test-markup-rubric/index.rst @@ -0,0 +1,7 @@ +test-markup-rubric +=================== + +.. rubric:: This is a rubric + +.. rubric:: This is + a multiline rubric diff --git a/tests/roots/test-maxlistdepth/conf.py b/tests/roots/test-maxlistdepth/conf.py new file mode 100644 index 0000000..a3b12a2 --- /dev/null +++ b/tests/roots/test-maxlistdepth/conf.py @@ -0,0 +1,5 @@ +exclude_patterns = ['_build'] + +latex_elements = { + 'maxlistdepth': '10', +} diff --git a/tests/roots/test-maxlistdepth/index.rst b/tests/roots/test-maxlistdepth/index.rst new file mode 100644 index 0000000..5d9bc21 --- /dev/null +++ b/tests/roots/test-maxlistdepth/index.rst @@ -0,0 +1,57 @@ +test-maxlistdepth +================= + + +1. 1 + + 1. 2 + + 1. 3 + + 1. 4 + + 1. 5 + + 1. 6 + + 1. 7 + + 1. 8 + + 1. 9 + + 10a + + - 10b + + .. code-block:: python + + def foo(): + + +- 1 + + - 2 + + - 3 + + - 4 + + - 5 + + - 6 + + - 7 + + - 8 + + 1. 9 + + 10a + + 1. 10b + + .. code-block:: python + + def foo(): + diff --git a/tests/roots/test-metadata/conf.py b/tests/roots/test-metadata/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-metadata/conf.py diff --git a/tests/roots/test-metadata/index.rst b/tests/roots/test-metadata/index.rst new file mode 100644 index 0000000..42af665 --- /dev/null +++ b/tests/roots/test-metadata/index.rst @@ -0,0 +1,46 @@ +:Author: David Goodger +:Address: 123 Example Street + Example, EX Canada + A1B 2C3 +:Contact: goodger@python.org +:Authors: Me; Myself; I +:organization: humankind +:date: $Date: 2006-05-21 22:44:42 +0200 (Son, 21 Mai 2006) $ +:status: This is a "work in progress" +:revision: $Revision: 4564 $ +:version: 1 +:copyright: This document has been placed in the public domain. You + may do with it as you wish. You may copy, modify, + redistribute, reattribute, sell, buy, rent, lease, + destroy, or improve it, quote it at length, excerpt, + incorporate, collate, fold, staple, or mutilate it, or do + anything else to it that your or anyone else's heart + desires. +:field name: This is a generic bibliographic field. +:field name 2: + Generic bibliographic fields may contain multiple body elements. + + Like this. + +:Dedication: + + For Docutils users & co-developers. + +:abstract: + + This document is a demonstration of the reStructuredText markup + language, containing examples of all basic reStructuredText + constructs and many advanced constructs. + +:nocomments: +:orphan: +:tocdepth: 1 + +.. meta:: + :keywords: reStructuredText, demonstration, demo, parser + :description lang=en: A demonstration of the reStructuredText + markup language, containing examples of all basic + constructs and many advanced constructs. 
+ +test-metadata +============== diff --git a/tests/roots/test-need-escaped/bar.rst b/tests/roots/test-need-escaped/bar.rst new file mode 100644 index 0000000..1cccd3c --- /dev/null +++ b/tests/roots/test-need-escaped/bar.rst @@ -0,0 +1,2 @@ +bar +=== diff --git a/tests/roots/test-need-escaped/baz.rst b/tests/roots/test-need-escaped/baz.rst new file mode 100644 index 0000000..52e2e72 --- /dev/null +++ b/tests/roots/test-need-escaped/baz.rst @@ -0,0 +1,2 @@ +baz +=== diff --git a/tests/roots/test-need-escaped/conf.py b/tests/roots/test-need-escaped/conf.py new file mode 100644 index 0000000..0461ea3 --- /dev/null +++ b/tests/roots/test-need-escaped/conf.py @@ -0,0 +1,2 @@ +project = 'need <b>"escaped"</b> project' +smartquotes = False diff --git a/tests/roots/test-need-escaped/foo.rst b/tests/roots/test-need-escaped/foo.rst new file mode 100644 index 0000000..70859b3 --- /dev/null +++ b/tests/roots/test-need-escaped/foo.rst @@ -0,0 +1,15 @@ +<foo> +===== + +.. toctree:: + + quux + +foo "1" +------- + +foo.1-1 +^^^^^^^ + +foo.2 +----- diff --git a/tests/roots/test-need-escaped/index.rst b/tests/roots/test-need-escaped/index.rst new file mode 100644 index 0000000..9ef74e0 --- /dev/null +++ b/tests/roots/test-need-escaped/index.rst @@ -0,0 +1,30 @@ +.. Sphinx Tests documentation master file, created by sphinx-quickstart on Wed Jun 4 23:49:58 2008. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to Sphinx Tests's documentation! +======================================== + +Contents: + +.. toctree:: + :maxdepth: 2 + :numbered: + :caption: Table of Contents + :name: mastertoc + + foo + bar + http://sphinx-doc.org/ + baz + qux + +.. index:: + pair: "subsection"; <subsection> + +---------- +subsection +---------- + +subsubsection +------------- diff --git a/tests/roots/test-need-escaped/quux.rst b/tests/roots/test-need-escaped/quux.rst new file mode 100644 index 0000000..07dd0a0 --- /dev/null +++ b/tests/roots/test-need-escaped/quux.rst @@ -0,0 +1,2 @@ +quux +==== diff --git a/tests/roots/test-need-escaped/qux.rst b/tests/roots/test-need-escaped/qux.rst new file mode 100644 index 0000000..26176b9 --- /dev/null +++ b/tests/roots/test-need-escaped/qux.rst @@ -0,0 +1 @@ +qux.rst has no section title diff --git a/tests/roots/test-nested-enumerated-list/conf.py b/tests/roots/test-nested-enumerated-list/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-nested-enumerated-list/conf.py diff --git a/tests/roots/test-nested-enumerated-list/index.rst b/tests/roots/test-nested-enumerated-list/index.rst new file mode 100644 index 0000000..28ad72f --- /dev/null +++ b/tests/roots/test-nested-enumerated-list/index.rst @@ -0,0 +1,21 @@ +nested-enumerated-list +====================== + +5. Sphinx + + d. Documentation builder + e. Egypt + + 10) Pyramid + 11) Nile River + + (x) Atbara + (y) Blue Nile + (#) Sobat + (#) Semliki + (#) Kagera + +6. Markup + + iii. reStructuredText + iv. Markdown diff --git a/tests/roots/test-nested-tables/conf.py b/tests/roots/test-nested-tables/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-nested-tables/conf.py diff --git a/tests/roots/test-nested-tables/index.rst b/tests/roots/test-nested-tables/index.rst new file mode 100644 index 0000000..79110ae --- /dev/null +++ b/tests/roots/test-nested-tables/index.rst @@ -0,0 +1,16 @@ +nested-tables +============= + +.. 
list-table:: + :header-rows: 1 + + * - heading + - heading + * - content + - .. list-table:: + :header-rows: 1 + + * - heading + - heading + * - content + - content diff --git a/tests/roots/test-nitpicky-warnings/conf.py b/tests/roots/test-nitpicky-warnings/conf.py new file mode 100644 index 0000000..2db221c --- /dev/null +++ b/tests/roots/test-nitpicky-warnings/conf.py @@ -0,0 +1 @@ +nitpicky = True diff --git a/tests/roots/test-nitpicky-warnings/index.rst b/tests/roots/test-nitpicky-warnings/index.rst new file mode 100644 index 0000000..e73840d --- /dev/null +++ b/tests/roots/test-nitpicky-warnings/index.rst @@ -0,0 +1,7 @@ +test-nitpicky-warnings +====================== + +:py:const:`prefix.anything.postfix` +:py:class:`prefix.anything` +:py:class:`anything.postfix` +:js:class:`prefix.anything.postfix` diff --git a/tests/roots/test-numbered-circular/conf.py b/tests/roots/test-numbered-circular/conf.py new file mode 100644 index 0000000..a45d22e --- /dev/null +++ b/tests/roots/test-numbered-circular/conf.py @@ -0,0 +1 @@ +exclude_patterns = ['_build'] diff --git a/tests/roots/test-numbered-circular/index.rst b/tests/roots/test-numbered-circular/index.rst new file mode 100644 index 0000000..c3129cd --- /dev/null +++ b/tests/roots/test-numbered-circular/index.rst @@ -0,0 +1,5 @@ +.. toctree:: + :numbered: + + sub + diff --git a/tests/roots/test-numbered-circular/sub.rst b/tests/roots/test-numbered-circular/sub.rst new file mode 100644 index 0000000..cebfd65 --- /dev/null +++ b/tests/roots/test-numbered-circular/sub.rst @@ -0,0 +1,3 @@ +.. toctree:: + + index diff --git a/tests/roots/test-numfig/bar.rst b/tests/roots/test-numfig/bar.rst new file mode 100644 index 0000000..c4367c5 --- /dev/null +++ b/tests/roots/test-numfig/bar.rst @@ -0,0 +1,66 @@ +.. _bar: + +=== +Bar +=== + +.. _bar_a: + +Bar A +===== + +.. figure:: rimg.png + + should be Fig.2.1 + +.. csv-table:: should be Table 2.1 + :header-rows: 0 + + hello,world + +.. code-block:: python + :caption: should be List 2.1 + + print('hello world') + +.. toctree:: + + baz + +.. figure:: rimg.png + + should be Fig.2.3 + +.. csv-table:: should be Table 2.3 + :header-rows: 0 + + hello,world + +.. code-block:: python + :caption: should be List 2.3 + + print('hello world') + +.. _bar_b: + +Bar B +===== + +.. _bar_b1: + +Bar B1 +------ + +.. figure:: rimg.png + + should be Fig.2.4 + +.. csv-table:: should be Table 2.4 + :header-rows: 0 + + hello,world + +.. code-block:: python + :caption: should be List 2.4 + + print('hello world') diff --git a/tests/roots/test-numfig/baz.rst b/tests/roots/test-numfig/baz.rst new file mode 100644 index 0000000..3ac684b --- /dev/null +++ b/tests/roots/test-numfig/baz.rst @@ -0,0 +1,24 @@ +.. _baz_a: + +Baz A +----- + +.. _fig22: + +.. figure:: rimg.png + + should be Fig.2.2 + +.. _table22: + +.. csv-table:: should be Table 2.2 + :header-rows: 0 + + hello,world + +.. _CODE22: + +.. code-block:: python + :caption: should be List 2.2 + + print('hello world') diff --git a/tests/roots/test-numfig/conf.py b/tests/roots/test-numfig/conf.py new file mode 100644 index 0000000..a45d22e --- /dev/null +++ b/tests/roots/test-numfig/conf.py @@ -0,0 +1 @@ +exclude_patterns = ['_build'] diff --git a/tests/roots/test-numfig/foo.rst b/tests/roots/test-numfig/foo.rst new file mode 100644 index 0000000..6b6a865 --- /dev/null +++ b/tests/roots/test-numfig/foo.rst @@ -0,0 +1,81 @@ +.. _foo: + +=== +Foo +=== + +.. figure:: rimg.png + + should be Fig.1.1 + +.. csv-table:: should be Table 1.1 + :header-rows: 0 + + hello,world + +.. 
code-block:: python + :caption: should be List 1.1 + + print('hello world') + +.. _foo_a: + +Foo A +===== + +.. figure:: rimg.png + + should be Fig.1.2 + +.. figure:: rimg.png + + should be Fig.1.3 + +.. csv-table:: should be Table 1.2 + :header-rows: 0 + + hello,world + +.. csv-table:: should be Table 1.3 + :header-rows: 0 + + hello,world + +.. code-block:: python + :caption: should be List 1.2 + + print('hello world') + +.. code-block:: python + :caption: should be List 1.3 + + print('hello world') + +.. _foo_a1: + +Foo A1 +------ + +.. _foo_b: + +Foo B +===== + +.. _foo_b1: + +Foo B1 +------ + +.. figure:: rimg.png + + should be Fig.1.4 + +.. csv-table:: should be Table 1.4 + :header-rows: 0 + + hello,world + +.. code-block:: python + :caption: should be List 1.4 + + print('hello world') diff --git a/tests/roots/test-numfig/index.rst b/tests/roots/test-numfig/index.rst new file mode 100644 index 0000000..9399038 --- /dev/null +++ b/tests/roots/test-numfig/index.rst @@ -0,0 +1,59 @@ +.. _index: + +test-tocdepth +============= + +.. toctree:: + :numbered: + + foo + bar + +.. _fig1: + +.. figure:: rimg.png + + should be Fig.1 + +.. figure:: rimg.png + + should be Fig.2 + +.. _table-1: + +.. csv-table:: should be Table 1 + :header-rows: 0 + + hello,world + +.. csv-table:: should be Table 2 + :header-rows: 0 + + hello,world + +.. _CODE_1: + +.. code-block:: python + :caption: should be List 1 + + print('hello world') + +.. code-block:: python + :caption: should be List 2 + + print('hello world') + + +* Fig.1 is :numref:`fig1` +* Fig.2.2 is :numref:`Figure%s <fig22>` +* Table.1 is :numref:`table-1` +* Table.2.2 is :numref:`Table:%s <table22>` +* List.1 is :numref:`CODE_1` +* List.2.2 is :numref:`Code-%s <CODE22>` +* Section.1 is :numref:`foo` +* Section.2.1 is :numref:`bar_a` +* Unnumbered section is :numref:`index` +* Invalid numfig_format 01: :numref:`invalid <fig1>` +* Invalid numfig_format 02: :numref:`Fig %s %s <fig1>` +* Fig.1 is :numref:`Fig.{number} {name} <fig1>` +* Section.1 is :numref:`Sect.{number} {name} <foo>` diff --git a/tests/roots/test-numfig/rimg.png b/tests/roots/test-numfig/rimg.png Binary files differnew file mode 100644 index 0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-numfig/rimg.png diff --git a/tests/roots/test-object-description-sections/conf.py b/tests/roots/test-object-description-sections/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-object-description-sections/conf.py diff --git a/tests/roots/test-object-description-sections/index.rst b/tests/roots/test-object-description-sections/index.rst new file mode 100644 index 0000000..1892f94 --- /dev/null +++ b/tests/roots/test-object-description-sections/index.rst @@ -0,0 +1,6 @@ +.. py:function:: func() + + Overview + -------- + + Lorem ipsum dolar sit amet diff --git a/tests/roots/test-productionlist/Bare.rst b/tests/roots/test-productionlist/Bare.rst new file mode 100644 index 0000000..8ea9213 --- /dev/null +++ b/tests/roots/test-productionlist/Bare.rst @@ -0,0 +1,6 @@ +Bare +==== + +.. productionlist:: + A: `A` | somethingA + B: `B` | somethingB diff --git a/tests/roots/test-productionlist/Dup1.rst b/tests/roots/test-productionlist/Dup1.rst new file mode 100644 index 0000000..5cd09cb --- /dev/null +++ b/tests/roots/test-productionlist/Dup1.rst @@ -0,0 +1,5 @@ +Dup1 +==== + +.. 
productionlist:: + Dup: `Dup` | somethingDup diff --git a/tests/roots/test-productionlist/Dup2.rst b/tests/roots/test-productionlist/Dup2.rst new file mode 100644 index 0000000..1d66375 --- /dev/null +++ b/tests/roots/test-productionlist/Dup2.rst @@ -0,0 +1,5 @@ +Dup2 +==== + +.. productionlist:: + Dup: `Dup` | somethingDup diff --git a/tests/roots/test-productionlist/LineContinuation.rst b/tests/roots/test-productionlist/LineContinuation.rst new file mode 100644 index 0000000..4943e8b --- /dev/null +++ b/tests/roots/test-productionlist/LineContinuation.rst @@ -0,0 +1,6 @@ +LineContinuation +================ + +.. productionlist:: lineContinuation + A: B C D \ + E F G diff --git a/tests/roots/test-productionlist/P1.rst b/tests/roots/test-productionlist/P1.rst new file mode 100644 index 0000000..6f9a863 --- /dev/null +++ b/tests/roots/test-productionlist/P1.rst @@ -0,0 +1,6 @@ +P1 +== + +.. productionlist:: P1 + A: `A` | somethingA + B: `B` | somethingB diff --git a/tests/roots/test-productionlist/P2.rst b/tests/roots/test-productionlist/P2.rst new file mode 100644 index 0000000..e6c3bc1 --- /dev/null +++ b/tests/roots/test-productionlist/P2.rst @@ -0,0 +1,6 @@ +P2 +== + +.. productionlist:: P2 + A: `A` | somethingA + B: `B` | somethingB diff --git a/tests/roots/test-productionlist/conf.py b/tests/roots/test-productionlist/conf.py new file mode 100644 index 0000000..a45d22e --- /dev/null +++ b/tests/roots/test-productionlist/conf.py @@ -0,0 +1 @@ +exclude_patterns = ['_build'] diff --git a/tests/roots/test-productionlist/firstLineRule.rst b/tests/roots/test-productionlist/firstLineRule.rst new file mode 100644 index 0000000..30ea6e0 --- /dev/null +++ b/tests/roots/test-productionlist/firstLineRule.rst @@ -0,0 +1,5 @@ +FirstLineRule +============= + +.. productionlist:: FirstLine: something + SecondLine: somethingElse diff --git a/tests/roots/test-productionlist/index.rst b/tests/roots/test-productionlist/index.rst new file mode 100644 index 0000000..4a0b978 --- /dev/null +++ b/tests/roots/test-productionlist/index.rst @@ -0,0 +1,27 @@ +.. toctree:: + + P1 + P2 + Bare + Dup1 + Dup2 + firstLineRule + LineContinuation + +- A: :token:`A` +- B: :token:`B` +- P1:A: :token:`P1:A` +- P1:B: :token:`P1:B` +- P2:A: :token:`P1:A` +- P2:B: :token:`P2:B` +- Explicit title A, plain: :token:`MyTitle <A>` +- Explicit title A, colon: :token:`My:Title <A>` +- Explicit title P1:A, plain: :token:`MyTitle <P1:A>` +- Explicit title P1:A, colon: :token:`My:Title <P1:A>` +- Tilde A: :token:`~A`. +- Tilde P1:A: :token:`~P1:A`. +- Tilde explicit title P1:A: :token:`~MyTitle <P1:A>` +- Tilde, explicit title P1:A: :token:`MyTitle <~P1:A>` +- Dup: :token:`Dup` +- FirstLine: :token:`FirstLine` +- SecondLine: :token:`SecondLine` diff --git a/tests/roots/test-prolog/conf.py b/tests/roots/test-prolog/conf.py new file mode 100644 index 0000000..f6be09c --- /dev/null +++ b/tests/roots/test-prolog/conf.py @@ -0,0 +1,10 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath('.')) + + +extensions = ['prolog_markdown_parser'] + +rst_prolog = '*Hello world*.\n\n' +rst_epilog = '\n\n*Good-bye world*.' diff --git a/tests/roots/test-prolog/index.rst b/tests/roots/test-prolog/index.rst new file mode 100644 index 0000000..2178d73 --- /dev/null +++ b/tests/roots/test-prolog/index.rst @@ -0,0 +1,7 @@ +prolog and epilog +================= + +.. 
toctree:: + + restructuredtext + markdown diff --git a/tests/roots/test-prolog/markdown.md b/tests/roots/test-prolog/markdown.md new file mode 100644 index 0000000..e400720 --- /dev/null +++ b/tests/roots/test-prolog/markdown.md @@ -0,0 +1,3 @@ +# sample document + +This is a sample document in markdown diff --git a/tests/roots/test-prolog/prolog_markdown_parser.py b/tests/roots/test-prolog/prolog_markdown_parser.py new file mode 100644 index 0000000..f8d787c --- /dev/null +++ b/tests/roots/test-prolog/prolog_markdown_parser.py @@ -0,0 +1,13 @@ +from docutils.parsers import Parser + + +class DummyMarkdownParser(Parser): + supported = ('markdown',) + + def parse(self, inputstring, document): + document.rawsource = inputstring + + +def setup(app): + app.add_source_suffix('.md', 'markdown') + app.add_source_parser(DummyMarkdownParser) diff --git a/tests/roots/test-prolog/restructuredtext.rst b/tests/roots/test-prolog/restructuredtext.rst new file mode 100644 index 0000000..f1fafb6 --- /dev/null +++ b/tests/roots/test-prolog/restructuredtext.rst @@ -0,0 +1,4 @@ +sample document +=============== + +This is a sample document in reST diff --git a/tests/roots/test-pycode/cp_1251_coded.py b/tests/roots/test-pycode/cp_1251_coded.py new file mode 100644 index 0000000..43d98f3 --- /dev/null +++ b/tests/roots/test-pycode/cp_1251_coded.py @@ -0,0 +1,4 @@ +#!python
+# -*- coding: windows-1251 -*-
+
+X="" #:It MUST look like X=""
\ No newline at end of file diff --git a/tests/roots/test-reST-code-block/conf.py b/tests/roots/test-reST-code-block/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-reST-code-block/conf.py diff --git a/tests/roots/test-reST-code-block/index.rst b/tests/roots/test-reST-code-block/index.rst new file mode 100644 index 0000000..a7c7df0 --- /dev/null +++ b/tests/roots/test-reST-code-block/index.rst @@ -0,0 +1,7 @@ +.. code-block:: + :linenos: + + def hello(name) + print("hello", name) + + hello("Sphinx") diff --git a/tests/roots/test-reST-code-role/conf.py b/tests/roots/test-reST-code-role/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-reST-code-role/conf.py diff --git a/tests/roots/test-reST-code-role/index.rst b/tests/roots/test-reST-code-role/index.rst new file mode 100644 index 0000000..5be6bfc --- /dev/null +++ b/tests/roots/test-reST-code-role/index.rst @@ -0,0 +1,9 @@ +.. role:: python(code) + :language: python + :class: highlight + +Inline :python:`def foo(1 + 2 + None + "abc"): pass` code block + +.. code-block:: python + + def foo(1 + 2 + None + "abc"): pass diff --git a/tests/roots/test-refonly_bullet_list/conf.py b/tests/roots/test-refonly_bullet_list/conf.py new file mode 100644 index 0000000..bdccf9c --- /dev/null +++ b/tests/roots/test-refonly_bullet_list/conf.py @@ -0,0 +1 @@ +html_compact_lists = False diff --git a/tests/roots/test-refonly_bullet_list/index.rst b/tests/roots/test-refonly_bullet_list/index.rst new file mode 100644 index 0000000..9d8539d --- /dev/null +++ b/tests/roots/test-refonly_bullet_list/index.rst @@ -0,0 +1,14 @@ +test-refonly_bullet_list +======================== + +List A: + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + +List B: + +* Hello +* Sphinx +* World diff --git a/tests/roots/test-remote-logo/conf.py b/tests/roots/test-remote-logo/conf.py new file mode 100644 index 0000000..07949ba --- /dev/null +++ b/tests/roots/test-remote-logo/conf.py @@ -0,0 +1,5 @@ +latex_documents = [ + ('index', 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report') +] +html_logo = "https://www.python.org/static/img/python-logo.png" +html_favicon = "https://www.python.org/static/favicon.ico" diff --git a/tests/roots/test-remote-logo/index.rst b/tests/roots/test-remote-logo/index.rst new file mode 100644 index 0000000..af12ed6 --- /dev/null +++ b/tests/roots/test-remote-logo/index.rst @@ -0,0 +1,31 @@ +The basic Sphinx documentation for testing +========================================== + +Sphinx is a tool that makes it easy to create intelligent and beautiful +documentation for Python projects (or other documents consisting of multiple +reStructuredText sources), written by Georg Brandl. It was originally created +for the new Python documentation, and has excellent facilities for Python +project documentation, but C/C++ is supported as well, and more languages are +planned. + +Sphinx uses reStructuredText as its markup language, and many of its strengths +come from the power and straightforwardness of reStructuredText and its parsing +and translating suite, the Docutils. 
+ +features +-------- + +Among its features are the following: + +* Output formats: HTML (including derivative formats such as HTML Help, Epub + and Qt Help), plain text, manual pages and LaTeX or direct PDF output + using rst2pdf +* Extensive cross-references: semantic markup and automatic links + for functions, classes, glossary terms and similar pieces of information +* Hierarchical structure: easy definition of a document tree, with automatic + links to siblings, parents and children +* Automatic indices: general index as well as a module index +* Code handling: automatic highlighting using the Pygments highlighter +* Flexible HTML output using the Jinja 2 templating engine +* Various extensions are available, e.g. for automatic testing of snippets + and inclusion of appropriately formatted docstrings diff --git a/tests/roots/test-roles-download/another/dummy.dat b/tests/roots/test-roles-download/another/dummy.dat new file mode 100644 index 0000000..f6d9fed --- /dev/null +++ b/tests/roots/test-roles-download/another/dummy.dat @@ -0,0 +1 @@ +this one will have some content diff --git a/tests/roots/test-roles-download/conf.py b/tests/roots/test-roles-download/conf.py new file mode 100644 index 0000000..e274bde --- /dev/null +++ b/tests/roots/test-roles-download/conf.py @@ -0,0 +1,3 @@ +latex_documents = [ + ('index', 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report') +] diff --git a/tests/roots/test-roles-download/dummy.dat b/tests/roots/test-roles-download/dummy.dat new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-roles-download/dummy.dat diff --git a/tests/roots/test-roles-download/index.rst b/tests/roots/test-roles-download/index.rst new file mode 100644 index 0000000..cdb075e --- /dev/null +++ b/tests/roots/test-roles-download/index.rst @@ -0,0 +1,7 @@ +test-roles-download +=================== + +* :download:`dummy.dat` +* :download:`another/dummy.dat` +* :download:`not_found.dat` +* :download:`Sphinx logo <http://www.sphinx-doc.org/en/master/_static/sphinxheader.png>` diff --git a/tests/roots/test-root/Makefile b/tests/roots/test-root/Makefile new file mode 100644 index 0000000..85a93bc --- /dev/null +++ b/tests/roots/test-root/Makefile @@ -0,0 +1,67 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build + +# Internal variables. +ALLSPHINXOPTS = -d _build/doctrees $(SPHINXOPTS) . + +.PHONY: help clean html web pickle htmlhelp latex changes linkcheck + +help: + @echo "Please use \`make <target>' where <target> is one of" + @echo " html to make standalone HTML files" + @echo " pickle to make pickle files (usable by e.g. sphinx-web)" + @echo " htmlhelp to make HTML files and an HTML help project" + @echo " latex to make LaTeX files" + @echo " changes to make an overview over all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + +clean: + rm -rf _build/* + +html: + mkdir -p _build/html _build/doctrees + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) _build/html + @echo + @echo "Build finished. The HTML pages are in _build/html." + +pickle: + mkdir -p _build/pickle _build/doctrees + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) _build/pickle + @echo + @echo "Build finished; now you can process the pickle files or run" + @echo " sphinx-web _build/pickle" + @echo "to start the sphinx-web server." 
+ +web: pickle + +htmlhelp: + mkdir -p _build/htmlhelp _build/doctrees + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) _build/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in _build/htmlhelp." + +latex: + mkdir -p _build/latex _build/doctrees + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex + @echo + @echo "Build finished; the LaTeX files are in _build/latex." + @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ + "run these through (pdf)latex." + +changes: + mkdir -p _build/changes _build/doctrees + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) _build/changes + @echo + @echo "The overview file is in _build/changes." + +linkcheck: + mkdir -p _build/linkcheck _build/doctrees + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) _build/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in _build/linkcheck/output.txt." diff --git a/tests/roots/test-root/_templates/contentssb.html b/tests/roots/test-root/_templates/contentssb.html new file mode 100644 index 0000000..d0c276a --- /dev/null +++ b/tests/roots/test-root/_templates/contentssb.html @@ -0,0 +1,2 @@ +{# sidebar only for contents document #} +<h4>Contents sidebar</h4> diff --git a/tests/roots/test-root/_templates/customsb.html b/tests/roots/test-root/_templates/customsb.html new file mode 100644 index 0000000..0cd6735 --- /dev/null +++ b/tests/roots/test-root/_templates/customsb.html @@ -0,0 +1,4 @@ +{# custom sidebar template #} +<h4>Custom sidebar</h4> + +{{ toctree(titles_only=True, maxdepth=1) }} diff --git a/tests/roots/test-root/_templates/layout.html b/tests/roots/test-root/_templates/layout.html new file mode 100644 index 0000000..db685b0 --- /dev/null +++ b/tests/roots/test-root/_templates/layout.html @@ -0,0 +1,15 @@ +{% extends "!layout.html" %} + +{% block extrahead %} +{# html_context variable from conf.py #} +<meta name="hc" content="{{ hckey }}" /> +{# html_context variable from confoverrides (as if given on cmdline) #} +<meta name="hc_co" content="{{ hckey_co }}" /> +{{ super() }} +{% endblock %} + +{% block sidebartoc %} +{# display global TOC in addition to local TOC #} +{{ super() }} +{{ toctree(collapse=False, maxdepth=-1) }} +{% endblock %} diff --git a/tests/roots/test-root/autodoc.txt b/tests/roots/test-root/autodoc.txt new file mode 100644 index 0000000..959ab2d --- /dev/null +++ b/tests/roots/test-root/autodoc.txt @@ -0,0 +1,39 @@ +Autodoc tests +============= + +Just testing a few autodoc possibilities... + +.. automodule:: autodoc_target + :members: + +.. autofunction:: function + +.. autoclass:: Class + :inherited-members: + + Additional content. + +.. autoclass:: Outer + :members: Inner + +.. autoattribute:: Class.docattr + +.. autoexception:: CustomEx + :members: f + +.. autoclass:: CustomDict + :show-inheritance: + :members: + + +.. currentmodule:: autodoc_target + +.. autoclass:: InstAttCls + :members: + + All members (5 total) + +.. 
autoclass:: InstAttCls + :members: ca1, ia1 + + Specific members (2 total) diff --git a/tests/roots/test-root/autodoc_target.py b/tests/roots/test-root/autodoc_target.py new file mode 100644 index 0000000..59f6c74 --- /dev/null +++ b/tests/roots/test-root/autodoc_target.py @@ -0,0 +1,221 @@ +import enum +from io import StringIO + +__all__ = ['Class'] + +#: documentation for the integer +integer = 1 + + +def raises(exc, func, *args, **kwds): + """Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*.""" + pass + + +class CustomEx(Exception): + """My custom exception.""" + + def f(self): + """Exception method.""" + + +class CustomDataDescriptor: + """Descriptor class docstring.""" + + def __init__(self, doc): + self.__doc__ = doc + + def __get__(self, obj, type=None): + if obj is None: + return self + return 42 + + def meth(self): + """Function.""" + return "The Answer" + + +class CustomDataDescriptorMeta(type): + """Descriptor metaclass docstring.""" + + +class CustomDataDescriptor2(CustomDataDescriptor): + """Descriptor class with custom metaclass docstring.""" + __metaclass__ = CustomDataDescriptorMeta + + +def _funky_classmethod(name, b, c, d, docstring=None): + """Generates a classmethod for a class from a template by filling out + some arguments.""" + def template(cls, a, b, c, d=4, e=5, f=6): + return a, b, c, d, e, f + from functools import partial + function = partial(template, b=b, c=c, d=d) + function.__name__ = name + function.__doc__ = docstring + return classmethod(function) + + +class Base: + def inheritedmeth(self): + """Inherited function.""" + + +class Derived(Base): + def inheritedmeth(self): + # no docstring here + pass + + +class Class(Base): + """Class to document.""" + + descr = CustomDataDescriptor("Descriptor instance docstring.") + + def meth(self): + """Function.""" + + def undocmeth(self): + pass + + def skipmeth(self): + """Method that should be skipped.""" + + def excludemeth(self): + """Method that should be excluded.""" + + # should not be documented + skipattr = 'foo' + + #: should be documented -- süß + attr = 'bar' + + @property + def prop(self): + """Property.""" + + docattr = 'baz' + """should likewise be documented -- süß""" + + udocattr = 'quux' + """should be documented as well - süß""" + + # initialized to any class imported from another module + mdocattr = StringIO() + """should be documented as well - süß""" + + roger = _funky_classmethod("roger", 2, 3, 4) + + moore = _funky_classmethod("moore", 9, 8, 7, + docstring="moore(a, e, f) -> happiness") + + def __init__(self, arg): + self.inst_attr_inline = None #: an inline documented instance attr + #: a documented instance attribute + self.inst_attr_comment = None + self.inst_attr_string = None + """a documented instance attribute""" + self._private_inst_attr = None #: a private instance attribute + + def __special1__(self): + """documented special method""" + + def __special2__(self): + # undocumented special method + pass + + +class CustomDict(dict): + """Docstring.""" + + +def function(foo, *args, **kwds): + """ + Return spam. 
+ """ + pass + + +class Outer: + """Foo""" + + class Inner: + """Foo""" + + def meth(self): + """Foo""" + + # should be documented as an alias + factory = dict + + +class DocstringSig: + def meth(self): + """meth(FOO, BAR=1) -> BAZ +First line of docstring + + rest of docstring + """ + + def meth2(self): + """First line, no signature + Second line followed by indentation:: + + indented line + """ + + @property + def prop1(self): + """DocstringSig.prop1(self) + First line of docstring + """ + return 123 + + @property + def prop2(self): + """First line of docstring + Second line of docstring + """ + return 456 + + +class StrRepr(str): + def __repr__(self): + return self + + +class AttCls: + a1 = StrRepr('hello\nworld') + a2 = None + + +class InstAttCls: + """Class with documented class and instance attributes.""" + + #: Doc comment for class attribute InstAttCls.ca1. + #: It can have multiple lines. + ca1 = 'a' + + ca2 = 'b' #: Doc comment for InstAttCls.ca2. One line only. + + ca3 = 'c' + """Docstring for class attribute InstAttCls.ca3.""" + + def __init__(self): + #: Doc comment for instance attribute InstAttCls.ia1 + self.ia1 = 'd' + + self.ia2 = 'e' + """Docstring for instance attribute InstAttCls.ia2.""" + + +class EnumCls(enum.Enum): + """ + this is enum class + """ + + #: doc for val1 + val1 = 12 + val2 = 23 #: doc for val2 + val3 = 34 + """doc for val3""" diff --git a/tests/roots/test-root/bom.txt b/tests/roots/test-root/bom.txt new file mode 100644 index 0000000..3fea824 --- /dev/null +++ b/tests/roots/test-root/bom.txt @@ -0,0 +1,5 @@ +File with UTF-8 BOM +=================== + +This file has a UTF-8 "BOM". + diff --git a/tests/roots/test-root/conf.py b/tests/roots/test-root/conf.py new file mode 100644 index 0000000..154d4d1 --- /dev/null +++ b/tests/roots/test-root/conf.py @@ -0,0 +1,148 @@ +import os +import sys + +from docutils import nodes +from docutils.parsers.rst import Directive + +from sphinx import addnodes + +sys.path.append(os.path.abspath('.')) + +extensions = ['sphinx.ext.autodoc', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.extlinks'] + +jsmath_path = 'dummy.js' + +templates_path = ['_templates'] + +source_suffix = ['.txt', '.add', '.foo'] + +project = 'Sphinx <Tests>' +copyright = '1234-6789, copyright text credits' +# If this is changed, remember to update the versionchanges! 
+version = '0.6' +release = '0.6alpha1' +today_fmt = '%B %d, %Y' +exclude_patterns = ['_build', '**/excluded.*'] +keep_warnings = True +pygments_style = 'sphinx' +show_authors = True +numfig = True + +html_sidebars = {'**': ['localtoc.html', 'relations.html', 'sourcelink.html', + 'customsb.html', 'searchbox.html'], + 'index': ['contentssb.html', 'localtoc.html', 'globaltoc.html']} +html_last_updated_fmt = '%b %d, %Y' +html_context = {'hckey': 'hcval', 'hckey_co': 'wrong_hcval_co'} + +latex_additional_files = ['svgimg.svg'] +# some random pdf layout parameters to check they don't break build +latex_elements = { + 'sphinxsetup': """ + verbatimwithframe, + verbatimwrapslines, + verbatimforcewraps, + verbatimmaxoverfull=1, + verbatimmaxunderfull=5, + verbatimhintsturnover=true, + verbatimcontinuesalign=l, + VerbatimColor={RGB}{242,242,242}, + VerbatimBorderColor={RGB}{32,32,32}, + VerbatimHighlightColor={RGB}{200,200,200}, + pre_box-decoration-break=slice, + pre_border-top-left-radius=20pt, + pre_border-top-right-radius=0pt, + pre_border-bottom-right-radius=20pt, + pre_border-bottom-left-radius=0pt, + verbatimsep=1pt, + pre_padding=5pt,% alias to verbatimsep + pre_border-top-width=5pt, + pre_border-right-width=10pt, + pre_border-bottom-width=15pt, + pre_border-left-width=20pt, + pre_border-width=3pt,% overrides all previous four + verbatimborder=2pt,% alias to pre_border-width +% + shadowrule=1pt, + shadowsep=10pt, + shadowsize=10pt, + div.topic_border-width=2pt,% alias to shadowrule + div.topic_padding=6pt,% alias to shadowsep + div.topic_box-shadow=5pt,% overrides/alias shadowsize +% + noteBorderColor={RGB}{204,204,204}, + hintBorderColor={RGB}{204,204,204}, + importantBorderColor={RGB}{204,204,204}, + tipBorderColor={RGB}{204,204,204}, +% + noteborder=5pt, + hintborder=5pt, + importantborder=5pt, + tipborder=5pt, +% + warningborder=3pt, + cautionborder=3pt, + attentionborder=3pt, + errorborder=3pt, +% + dangerborder=3pt, + div.danger_border-width=10pt, + div.danger_background-TeXcolor={rgb}{0,1,0}, + div.danger_border-TeXcolor={rgb}{0,0,1}, + div.danger_box-shadow=20pt -20pt, + div.danger_box-shadow-TeXcolor={rgb}{0.5,0.5,0.5}, +% + warningBorderColor={RGB}{255,119,119}, + cautionBorderColor={RGB}{255,119,119}, + attentionBorderColor={RGB}{255,119,119}, + dangerBorderColor={RGB}{255,119,119}, + errorBorderColor={RGB}{255,119,119}, + warningBgColor={RGB}{255,238,238}, + cautionBgColor={RGB}{255,238,238}, + attentionBgColor={RGB}{255,238,238}, + dangerBgColor={RGB}{255,238,238}, + errorBgColor={RGB}{255,238,238}, +% + TableRowColorHeader={rgb}{0,1,0}, + TableRowColorOdd={rgb}{0.5,0,0}, + TableRowColorEven={rgb}{0.1,0.1,0.1}, +""", +} + +coverage_c_path = ['special/*.h'] +coverage_c_regexes = {'function': r'^PyAPI_FUNC\(.*\)\s+([^_][\w_]+)'} + +extlinks = {'issue': ('http://bugs.python.org/issue%s', 'issue %s'), + 'pyurl': ('http://python.org/%s', None)} + +# modify tags from conf.py +tags.add('confpytag') + + +# -- extension API +def userdesc_parse(env, sig, signode): + x, y = sig.split(':') + signode += addnodes.desc_name(x, x) + signode += addnodes.desc_parameterlist() + signode[-1] += addnodes.desc_parameter(y, y) + return x + + +class ClassDirective(Directive): + option_spec = {'opt': lambda x: x} + + def run(self): + return [nodes.strong(text='from class: %s' % self.options['opt'])] + + +def setup(app): + import parsermod + + app.add_directive('clsdir', ClassDirective) + app.add_object_type('userdesc', 'userdescrole', '%s (userdesc)', + userdesc_parse, objname='user desc') + 
app.add_js_file('file://moo.js') + app.add_source_suffix('.foo', 'foo') + app.add_source_parser(parsermod.Parser) diff --git a/tests/roots/test-root/extapi.txt b/tests/roots/test-root/extapi.txt new file mode 100644 index 0000000..56be6d8 --- /dev/null +++ b/tests/roots/test-root/extapi.txt @@ -0,0 +1,7 @@ +Extension API tests +=================== + +Testing directives: + +.. clsdir:: + :opt: Bar diff --git a/tests/roots/test-root/extensions.txt b/tests/roots/test-root/extensions.txt new file mode 100644 index 0000000..96b1f8e --- /dev/null +++ b/tests/roots/test-root/extensions.txt @@ -0,0 +1,28 @@ +Test for diverse extensions +=========================== + +extlinks +-------- + +Test diverse links: :issue:`1000` and :pyurl:`dev/`, also with +:issue:`explicit caption <1042>`. + + +todo +---- + +.. todo:: + + Test the todo extension. + +.. todo:: + + Test with |sub| (see #286). + +.. |sub| replace:: substitution references + + +list of all todos +^^^^^^^^^^^^^^^^^ + +.. todolist:: diff --git a/tests/roots/test-root/file_with_special_#_chars.xyz b/tests/roots/test-root/file_with_special_#_chars.xyz new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-root/file_with_special_#_chars.xyz diff --git a/tests/roots/test-root/footnote.txt b/tests/roots/test-root/footnote.txt new file mode 100644 index 0000000..49cceee --- /dev/null +++ b/tests/roots/test-root/footnote.txt @@ -0,0 +1,60 @@ +:tocdepth: 2 + +Testing footnote and citation +================================ +.. #1058 footnote-backlinks-do-not-work + +numbered footnote +-------------------- + +[1]_ + +auto-numbered footnote +------------------------------ + +[#]_ + +named footnote +-------------------- + +[#foo]_ + +citation +-------------------- + +[bar]_ +[baz_qux]_ + +footnotes in table +-------------------- + +.. list-table:: Table caption [#]_ + :header-rows: 1 + + * - name [#]_ + - description + * - VIDIOC_CROPCAP + - Information about VIDIOC_CROPCAP [#]_ + +footenotes +-------------------- + +.. rubric:: Footnotes + +.. [1] numbered + +.. [#] auto numbered + +.. [#foo] named + +.. rubric:: Citations + +.. [bar] cite + +.. [baz_qux] citation including underscore + +.. [#] footnote in table caption + +.. [#] footnote in table header + +.. [#] footnote in table not in header diff --git a/tests/roots/test-root/images.txt b/tests/roots/test-root/images.txt new file mode 100644 index 0000000..1dc591a --- /dev/null +++ b/tests/roots/test-root/images.txt @@ -0,0 +1,25 @@ +Sphinx image handling +===================== + +.. first, a simple test with direct filename +.. image:: img.png + +.. an image with path name (relative to this directory!) +.. image:: subdir/img.png + :height: 100 + :width: 200 + +.. an image with unspecified extension +.. image:: img.* + +.. a non-local image URI +.. image:: https://www.python.org/static/img/python-logo.png + +.. an image with subdir and unspecified extension +.. image:: subdir/simg.* + +.. an SVG image (for HTML at least) +.. image:: svgimg.* + +.. an image with more than 1 dot in its file name +.. 
image:: img.foo.png diff --git a/tests/roots/test-root/img.foo.png b/tests/roots/test-root/img.foo.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-root/img.foo.png diff --git a/tests/roots/test-root/img.gif b/tests/roots/test-root/img.gif Binary files differnew file mode 100644 index 0000000..8f02686 --- /dev/null +++ b/tests/roots/test-root/img.gif diff --git a/tests/roots/test-root/img.pdf b/tests/roots/test-root/img.pdf Binary files differnew file mode 100644 index 0000000..cacbd85 --- /dev/null +++ b/tests/roots/test-root/img.pdf diff --git a/tests/roots/test-root/img.png b/tests/roots/test-root/img.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-root/img.png diff --git a/tests/roots/test-root/includes.txt b/tests/roots/test-root/includes.txt new file mode 100644 index 0000000..19b98ae --- /dev/null +++ b/tests/roots/test-root/includes.txt @@ -0,0 +1,102 @@ +Testing downloadable files +========================== + +Download :download:`img.png` here. +Download :download:`this <subdir/img.png>` there. +Download :download:`file with special characters <file_with_special_#_chars.xyz>`. + +Test file and literal inclusion +=============================== + +.. include:: subdir/include.inc + +.. include:: /subdir/include.inc + +.. literalinclude:: literal.inc + :language: python + +.. should succeed +.. literalinclude:: wrongenc.inc + :encoding: latin-1 + :language: none +.. include:: wrongenc.inc + :encoding: latin-1 + +Literalinclude options +====================== + +.. highlight:: text + +.. cssclass:: inc-pyobj1 +.. literalinclude:: literal.inc + :pyobject: Foo + +.. cssclass:: inc-pyobj2 +.. literalinclude:: literal.inc + :pyobject: Bar.baz + +.. cssclass:: inc-lines +.. literalinclude:: literal.inc + :lines: 6-7,9 + :lineno-start: 6 + +.. cssclass:: inc-startend +.. literalinclude:: literal.inc + :start-after: coding: utf-8 + :end-before: class Foo + +.. cssclass:: inc-preappend +.. literalinclude:: literal.inc + :prepend: START CODE + :append: END CODE + +.. literalinclude:: literal.inc + :start-after: utf-8 + +.. literalinclude:: literal.inc + :end-before: class Foo + +.. literalinclude:: literal.inc + :diff: literal_orig.inc + +.. cssclass:: inc-tab3 +.. literalinclude:: tabs.inc + :tab-width: 3 + :language: text + +.. cssclass:: inc-tab8 +.. literalinclude:: tabs.inc + :tab-width: 8 + :language: python + +.. cssclass:: inc-pyobj-lines-match +.. literalinclude:: literal.inc + :pyobject: Foo + :lineno-match: + +.. cssclass:: inc-lines-match +.. literalinclude:: literal.inc + :lines: 6-7,8 + :lineno-match: + +.. cssclass:: inc-startend-match +.. literalinclude:: literal.inc + :start-after: coding: utf-8 + :end-before: class Foo + :lineno-match: + +Test if dedenting before parsing works. + +.. highlight:: python + +.. cssclass:: inc-pyobj-dedent +.. literalinclude:: literal.inc + :pyobject: Bar.baz + +Docutils include with "literal" +=============================== + +While not recommended, it should work (and leave quotes alone). + +.. include:: quotes.inc + :literal: diff --git a/tests/roots/test-root/index.txt b/tests/roots/test-root/index.txt new file mode 100644 index 0000000..e39c958 --- /dev/null +++ b/tests/roots/test-root/index.txt @@ -0,0 +1,65 @@ +.. Sphinx Tests documentation master file, created by sphinx-quickstart on Wed Jun 4 23:49:58 2008. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. 
+ +Welcome to Sphinx Tests's documentation! +======================================== + +Contents: + +.. toctree:: + :maxdepth: 2 + :numbered: + :caption: Table of Contents + :name: mastertoc + + extapi + images + subdir/images + subdir/includes + includes + markup + objects + bom + math + autodoc + extensions + footnote + lists + otherext + + http://sphinx-doc.org/ + Latest reference <http://sphinx-doc.org/latest/> + Python <http://python.org/> + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` + +References +========== + +.. [Ref1] Reference target. +.. [Ref_1] Reference target 2. + +Test for issue #1157 +==================== + +This used to crash: + +.. toctree:: + +.. toctree:: + :hidden: + +Test for issue #1700 +==================== + +:ref:`mastertoc` + +Test for indirect hyperlink targets +=================================== + +:ref:`indirect hyperref <other-label>` diff --git a/tests/roots/test-root/lists.txt b/tests/roots/test-root/lists.txt new file mode 100644 index 0000000..0b54454 --- /dev/null +++ b/tests/roots/test-root/lists.txt @@ -0,0 +1,70 @@ +Various kinds of lists +====================== + + +nested enumerated lists +----------------------- + +#. one + +#. two + + #. two.1 + #. two.2 + +#. three + + +enumerated lists with non-default start values +---------------------------------------------- + +0. zero +#. one + +---------------------------------------- + +1. one +#. two + +---------------------------------------- + +2. two +#. three + + +enumerated lists using letters +------------------------------ + +a. a + +b. b + +#. c + +#. d + +---------------------------------------- + +x. x + +y. y + +#. z + +#. { + +definition lists +----------------- + +term1 + description + +term2 (**stronged partially**) + description + +Samp tests +---------- + +:samp:`{variable_only}` +:samp:`{variable} and text` +:samp:`Show {variable} in the middle` diff --git a/tests/roots/test-root/literal.inc b/tests/roots/test-root/literal.inc new file mode 100644 index 0000000..694f15e --- /dev/null +++ b/tests/roots/test-root/literal.inc @@ -0,0 +1,13 @@ +# Literally included file using Python highlighting +# -*- coding: utf-8 -*- + +foo = "Including Unicode characters: üöä" + +class Foo: + pass + +class Bar: + def baz(): + pass + +def bar(): pass diff --git a/tests/roots/test-root/literal_orig.inc b/tests/roots/test-root/literal_orig.inc new file mode 100644 index 0000000..cfb5dd1 --- /dev/null +++ b/tests/roots/test-root/literal_orig.inc @@ -0,0 +1,12 @@ +# Literally included file using Python highlighting + +foo = "Including Unicode characters: üöä" # This will be changed + +class FooOrig: + pass + +class BarOrig: + def baz(): + pass + +def bar(): pass diff --git a/tests/roots/test-root/markup.txt b/tests/roots/test-root/markup.txt new file mode 100644 index 0000000..b59a652 --- /dev/null +++ b/tests/roots/test-root/markup.txt @@ -0,0 +1,455 @@ +:tocdepth: 2 + +.. title:: set by title directive +.. _1024: + +Testing various markup +====================== + +Meta markup +----------- + +.. sectionauthor:: Georg Brandl +.. moduleauthor:: Georg Brandl + +.. contents:: TOC + +.. meta:: + :author: Me + :keywords: docs, sphinx + + +Generic reST +------------ + +A |subst|! + +.. |subst| replace:: global substitution + +.. highlight:: none + +.. _label: + +:: + + some code + +Option list: + +-h help +--help also help + +Line block: + +| line1 +| line2 +| line3 +| line4 +| line5 +| line6 +| line7 + + +Body directives +^^^^^^^^^^^^^^^ + +.. topic:: Title + + Topic body. 
+ +.. sidebar:: Sidebar + :subtitle: Sidebar subtitle + + Sidebar body. + +.. rubric:: Test rubric + +.. epigraph:: Epigraph title + + Epigraph body. + + -- Author + +.. highlights:: Highlights + + Highlights body. + +.. pull-quote:: Pull-quote + + Pull quote body. + +.. compound:: + + a + + b + +.. parsed-literal:: + + with some *markup* inside + + +.. _admonition-section: + +Admonitions +^^^^^^^^^^^ + +.. admonition:: My Admonition + + Admonition text. + +.. note:: + Note text. + +.. warning:: + + Warning text. + +.. _some-label: + +.. tip:: + Tip text. + +Indirect hyperlink targets + +.. _other-label: some-label_ + +Inline markup +------------- + +*Generic inline markup* + +Adding \n to test unescaping. + +* :command:`command\\n` +* :dfn:`dfn\\n` +* :guilabel:`guilabel with &accelerator and \\n` +* :kbd:`kbd\\n` +* :mailheader:`mailheader\\n` +* :makevar:`makevar\\n` +* :manpage:`manpage\\n` +* :mimetype:`mimetype\\n` +* :newsgroup:`newsgroup\\n` +* :program:`program\\n` +* :regexp:`regexp\\n` +* :menuselection:`File --> Close\\n` +* :menuselection:`&File --> &Print` +* :file:`a/{varpart}/b\\n` +* :samp:`print {i}\\n` + +*Linking inline markup* + +* :pep:`8` +* :pep:`Python Enhancement Proposal #8 <8>` +* :rfc:`1` +* :rfc:`Request for Comments #1 <1>` +* :envvar:`HOME` +* :keyword:`with` +* :token:`try statement <try_stmt>` +* :ref:`admonition-section` +* :ref:`here <some-label>` +* :ref:`there <other-label>` +* :ref:`my-figure` +* :ref:`my-figure-name` +* :ref:`my-table` +* :ref:`my-table-name` +* :ref:`my-code-block` +* :ref:`my-code-block-name` +* :ref:`1024` +* :numref:`my-figure` +* :numref:`my-figure-name` +* :numref:`my-table` +* :numref:`my-table-name` +* :numref:`my-code-block` +* :numref:`my-code-block-name` +* :doc:`subdir/includes` +* ``:download:`` is tested in includes.txt +* :option:`Python -c option <python -c>` + +Test :abbr:`abbr (abbreviation)` and another :abbr:`abbr (abbreviation)`. + +Testing the :index:`index` role, also available with +:index:`explicit <pair: title; explicit>` title. + +.. _with: + +With +---- + +(Empty section.) + + +Tables +------ + +.. tabularcolumns:: |L|p{5cm}|R| + +.. _my-table: + +.. table:: my table + :name: my-table-name + + +----+----------------+----+ + | 1 | * Block elems | x | + | | * In table | | + +----+----------------+----+ + | 2 | Empty cells: | | + +----+----------------+----+ + +.. table:: empty cell in table header + + ===== ====== + \ + ===== ====== + 1 2 + 3 4 + ===== ====== + +Tables with multirow and multicol: + +.. only:: latex + + +----+----------------+---------+ + | 1 | test! | c | + +----+---------+------+ | + | 2 | col | col | | + | y +---------+------+----+----+ + | x | multi-column cell | x | + +----+---------------------+----+ + + +----+ + | 1 | + + + + | | + +----+ + +.. list-table:: + :header-rows: 0 + + * - .. figure:: img.png + + figure in table + + +Figures +------- + +.. _my-figure: + +.. figure:: img.png + :name: my-figure-name + + My caption of the figure + + My description paragraph of the figure. + + Description paragraph is wrapped with legend node. + +.. figure:: rimg.png + :align: right + + figure with align option + +.. figure:: rimg.png + :align: right + :figwidth: 50% + + figure with align & figwidth option + +.. figure:: rimg.png + :align: right + :width: 3cm + + figure with align & width option + +Version markup +-------------- + +.. versionadded:: 0.6 + Some funny **stuff**. + +.. versionchanged:: 0.6 + Even more funny stuff. + +.. deprecated:: 0.6 + Boring stuff. + +.. 
versionadded:: 1.2 + + First paragraph of versionadded. + +.. versionchanged:: 1.2 + First paragraph of versionchanged. + + Second paragraph of versionchanged. + + +Code blocks +----------- + +.. _my-code-block: + +.. code-block:: ruby + :linenos: + :caption: my ruby code + :name: my-code-block-name + + def ruby? + false + end + +Misc stuff +---------- + +Stuff [#]_ + +Reference lookup: [Ref1]_ (defined in another file). +Reference lookup underscore: [Ref_1]_ + +.. seealso:: something, something else, something more + + `Google <http://www.google.com>`_ + For everything. + +.. hlist:: + :columns: 4 + + * This + * is + * a horizontal + * list + * with several + * items + +.. rubric:: Side note + +This is a side note. + +This tests :CLASS:`role names in uppercase`. + +.. centered:: LICENSE AGREEMENT + +.. acks:: + + * Terry Pratchett + * J. R. R. Tolkien + * Monty Python + +.. glossary:: + :sorted: + + boson + Particle with integer spin. + + *fermion* + Particle with half-integer spin. + + tauon + myon + electron + Examples for fermions. + + über + Gewisse + + ähnlich + Dinge + +.. productionlist:: + try_stmt: `try1_stmt` | `try2_stmt` + try1_stmt: "try" ":" `suite` + : ("except" [`expression` ["," `target`]] ":" `suite`)+ + : ["else" ":" `suite`] + : ["finally" ":" `suite`] + try2_stmt: "try" ":" `suite` + : "finally" ":" `suite` + + +Index markup +------------ + +.. index:: + single: entry + pair: entry; pair + double: entry; double + triple: index; entry; triple + see: from; to + seealso: fromalso; toalso + +.. index:: + !Main, !Other + !single: entry; pair + +:index:`!Main` + +.. _ölabel: + +Ö... Some strange characters +---------------------------- + +Testing öäü... + + +Only directive +-------------- + +.. only:: html + + In HTML. + +.. only:: latex + + In LaTeX. + +.. only:: html or latex + + In both. + +.. only:: confpytag and (testtag or nonexisting_tag) + + Always present, because set through conf.py/command line. + + +Any role +-------- + +.. default-role:: any + +Test referencing to `headings <with>` and `objects <func_without_body>`. +Also `modules <mod>` and `classes <Time>`. + +More domains: + +* `JS <bar.baz>` +* `C <SphinxType>` +* `myobj` (user markup) +* `n::Array` +* `perl -c` + +.. default-role:: + + +Smart quotes +------------ + +* Smart "quotes" in English 'text'. +* Smart --- long and -- short dashes. +* Ellipsis... +* No smartypants in literal blocks: ``foo--"bar"...``. + +.. only:: html + + .. LaTeX does not like Cyrillic letters in this test, so it is HTML only. + + .. rst-class:: language-ru + + Этот "абзац" должен использовать 'русские' кавычки. + + .. rst-class:: language-fr + + Il dit : "C'est 'super' !" + +.. rubric:: Footnotes + +.. [#] Like footnotes. + diff --git a/tests/roots/test-root/math.txt b/tests/roots/test-root/math.txt new file mode 100644 index 0000000..5a209be --- /dev/null +++ b/tests/roots/test-root/math.txt @@ -0,0 +1,31 @@ +Test math extensions :math:`E = m c^2` +====================================== + +This is inline math: :math:`a^2 + b^2 = c^2`. + +.. math:: a^2 + b^2 = c^2 + +.. math:: + + a + 1 < b + +.. math:: + :label: foo + + e^{i\pi} = 1 + +.. math:: + :label: + + e^{ix} = \cos x + i\sin x + +.. math:: + + n \in \mathbb N + +.. math:: + :nowrap: + + a + 1 < b + +Referencing equation :eq:`foo`. 
diff --git a/tests/roots/test-root/objects.txt b/tests/roots/test-root/objects.txt new file mode 100644 index 0000000..ed5f2c2 --- /dev/null +++ b/tests/roots/test-root/objects.txt @@ -0,0 +1,262 @@ +Testing object descriptions +=========================== + +.. function:: func_without_module(a, b, *c[, d]) + + Does something. + +.. function:: func_without_body() + +.. function:: func_with_unknown_field() + + : : + + : empty field name: + + :field_name: + + :field_name all lower: + + :FIELD_NAME: + + :FIELD_NAME ALL CAPS: + + :Field_Name: + + :Field_Name All Word Caps: + + :Field_name: + + :Field_name First word cap: + + :FIELd_name: + + :FIELd_name PARTial caps: + +.. function:: func_noindex + :no-index: + +.. function:: func_with_module + :module: foolib + +Referring to :func:`func with no index <func_noindex>`. +Referring to :func:`nothing <>`. + +.. module:: mod + :synopsis: Module synopsis. + :platform: UNIX + +.. function:: func_in_module + +.. class:: Cls + + .. method:: meth1 + + .. staticmethod:: meths + + .. attribute:: attr + +.. explicit class given +.. method:: Cls.meth2 + +.. explicit module given +.. exception:: Error(arg1, arg2) + :module: errmod + +.. data:: var + + +.. currentmodule:: None + +.. function:: func_without_module2() -> annotation + +.. object:: long(parameter, \ + list) + another one + +.. class:: TimeInt + + Has only one parameter (triggers special behavior...) + + :param moo: |test| + :type moo: |test| + +.. |test| replace:: Moo + +.. class:: Time(hour, minute, isdst) + + :param year: The year. + :type year: TimeInt + :param TimeInt minute: The minute. + :param isdst: whether it's DST + :type isdst: * some complex + * expression + :returns: a new :class:`Time` instance + :rtype: Time + :raises Error: if the values are out of range + :ivar int hour: like *hour* + :ivar minute: like *minute* + :vartype minute: int + :param hour: Some parameter + :type hour: DuplicateType + :param hour: Duplicate param. Should not lead to crashes. + :type hour: DuplicateType + :param .Cls extcls: A class from another module. + +.. raw:: latex + + \begingroup + \let\oldhref\href + \def\href{\ifnum\catcode`\-=\active\errorwithsphinxhref\fi\oldhref} + +.. class:: MyClass + + .. attribute:: config + :type: sphinx.config.Config + + A configuration object. + +.. raw:: latex + + \endgroup + +C items +======= + +.. c:function:: void Sphinx_DoSomething() + +.. c:member:: int SphinxStruct.member + +.. c:macro:: SPHINX_USE_PYTHON + +.. c:type:: SphinxType + +.. c:var:: int sphinx_global + +.. c:function:: PyObject* Py_SphinxFoo(void) + + +Javascript items +================ + +.. js:function:: foo() + +.. js:data:: bar + +.. documenting the method of any object +.. js:function:: bar.baz(href, callback[, errback]) + + :param string href: The location of the resource. + :param callback: Gets called with the data returned by the resource. + :throws InvalidHref: If the `href` is invalid. + :returns: `undefined` + +.. js:attribute:: bar.spam + +References +========== + +Referencing :class:`mod.Cls` or :Class:`mod.Cls` should be the same. + +With target: :c:func:`Sphinx_DoSomething()` (parentheses are handled), +:c:member:`SphinxStruct.member`, :c:macro:`SPHINX_USE_PYTHON`, +:c:type:`SphinxType *` (pointer is handled), :c:data:`sphinx_global`. + +Without target: :c:func:`CFunction`. :c:func:`!malloc`. + +:js:func:`foo()` +:js:func:`foo` + +:js:data:`bar` +:js:func:`bar.baz()` +:js:func:`bar.baz` +:js:func:`~bar.baz()` + +:js:attr:`bar.baz` + + +Others +====== + +.. envvar:: HOME + +.. 
program:: python + +.. cmdoption:: -c command + +.. program:: perl + +.. cmdoption:: -c + +.. option:: +p + +.. option:: --ObjC++ + +.. option:: --plugin.option + +.. option:: create-auth-token + +.. option:: arg + +.. option:: -j[=N] + +Link to :option:`perl +p`, :option:`--ObjC++`, :option:`--plugin.option`, :option:`create-auth-token`, :option:`arg` and :option:`-j` + +.. program:: hg + +.. option:: commit + +.. program:: git commit + +.. option:: -p + +Link to :option:`hg commit` and :option:`git commit -p`. + +.. option:: --abi={TYPE} + +.. option:: --test={WHERE}-{COUNT} + +.. option:: --wrap=\{\{value\}\} + +.. option:: -allowable_client {client_name} + +Foo bar. + +Test repeated option directive. + +.. option:: -mapi + + My API. + +.. option:: -mapi=secret + + My secret API. + +Reference the first option :option:`-mapi=secret`, :option:`-mapi[=xxx]` +or :option:`-mapi with_space`. + + +User markup +=========== + +.. userdesc:: myobj:parameter + + Description of userdesc. + + +Referencing :userdescrole:`myobj`. + + +CPP domain +========== + +.. cpp:class:: n::Array + + .. cpp:function:: T& operator[]( unsigned j ) + const T& operator[]( unsigned j ) const + +.. cpp:function:: template<typename T1, typename T2> \ + requires A<T1, T2> \ + void f() + +- :cpp:expr:`a + b` diff --git a/tests/roots/test-root/otherext.foo b/tests/roots/test-root/otherext.foo new file mode 100644 index 0000000..531ea29 --- /dev/null +++ b/tests/roots/test-root/otherext.foo @@ -0,0 +1,2 @@ +The contents of this file are ignored. +The file is "parsed" using Parser in the tests/root/parsermod.py file. diff --git a/tests/roots/test-root/parsermod.py b/tests/roots/test-root/parsermod.py new file mode 100644 index 0000000..de0849c --- /dev/null +++ b/tests/roots/test-root/parsermod.py @@ -0,0 +1,14 @@ +from docutils import nodes +from docutils.parsers import Parser + + +class Parser(Parser): + supported = ('foo',) + + def parse(self, input, document): + section = nodes.section(ids=['id1']) + section += nodes.title('Generated section', 'Generated section') + document += section + + def get_transforms(self): + return [] diff --git a/tests/roots/test-root/quotes.inc b/tests/roots/test-root/quotes.inc new file mode 100644 index 0000000..276cc56 --- /dev/null +++ b/tests/roots/test-root/quotes.inc @@ -0,0 +1 @@ +Testing "quotes" in literal 'included' text. 
diff --git a/tests/roots/test-root/rimg.png b/tests/roots/test-root/rimg.png Binary files differnew file mode 100644 index 0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-root/rimg.png diff --git a/tests/roots/test-root/special/api.h b/tests/roots/test-root/special/api.h new file mode 100644 index 0000000..2bf2382 --- /dev/null +++ b/tests/roots/test-root/special/api.h @@ -0,0 +1,2 @@ +PyAPI_FUNC(PyObject *) Py_SphinxTest(void); +PyAPI_FUNC(PyObject *) Py_SphinxFoo(void); diff --git a/tests/roots/test-root/special/code.py b/tests/roots/test-root/special/code.py new file mode 100644 index 0000000..b7934b2 --- /dev/null +++ b/tests/roots/test-root/special/code.py @@ -0,0 +1,2 @@ +print("line 1") +print("line 2") diff --git a/tests/roots/test-root/subdir/excluded.txt b/tests/roots/test-root/subdir/excluded.txt new file mode 100644 index 0000000..5df3139 --- /dev/null +++ b/tests/roots/test-root/subdir/excluded.txt @@ -0,0 +1,2 @@ +Excluded file -- should *not* be read as source +----------------------------------------------- diff --git a/tests/roots/test-root/subdir/images.txt b/tests/roots/test-root/subdir/images.txt new file mode 100644 index 0000000..f2adf88 --- /dev/null +++ b/tests/roots/test-root/subdir/images.txt @@ -0,0 +1,6 @@ +Image including source in subdir +================================ + +.. image:: img.* + +.. image:: /rimg.png diff --git a/tests/roots/test-root/subdir/img.png b/tests/roots/test-root/subdir/img.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-root/subdir/img.png diff --git a/tests/roots/test-root/subdir/include.inc b/tests/roots/test-root/subdir/include.inc new file mode 100644 index 0000000..d89275d --- /dev/null +++ b/tests/roots/test-root/subdir/include.inc @@ -0,0 +1,5 @@ +.. This file is included by contents.txt. + +.. Paths in included files are relative to the file that + includes them +.. image:: subdir/img.png diff --git a/tests/roots/test-root/subdir/includes.txt b/tests/roots/test-root/subdir/includes.txt new file mode 100644 index 0000000..627dcfb --- /dev/null +++ b/tests/roots/test-root/subdir/includes.txt @@ -0,0 +1,18 @@ +Including in subdir +=================== + +.. absolute filename +.. literalinclude:: /special/code.py + :lines: 1 + +.. relative filename +.. literalinclude:: ../special/code.py + :lines: 2 + +Absolute :download:`/img.png` download. + +.. absolute image filename +.. image:: /img.png + +.. absolute include filename +.. 
include:: /test.inc diff --git a/tests/roots/test-root/subdir/simg.png b/tests/roots/test-root/subdir/simg.png Binary files differnew file mode 100644 index 0000000..a97e86d --- /dev/null +++ b/tests/roots/test-root/subdir/simg.png diff --git a/tests/roots/test-root/svgimg.pdf b/tests/roots/test-root/svgimg.pdf Binary files differnew file mode 100644 index 0000000..cacbd85 --- /dev/null +++ b/tests/roots/test-root/svgimg.pdf diff --git a/tests/roots/test-root/svgimg.svg b/tests/roots/test-root/svgimg.svg new file mode 100644 index 0000000..2bae0b9 --- /dev/null +++ b/tests/roots/test-root/svgimg.svg @@ -0,0 +1,4 @@ +<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="60" width="60"> + <circle cx="40" cy="40" r="24" style="stroke:#000000; fill:#ffffff"/> +</svg> diff --git a/tests/roots/test-root/tabs.inc b/tests/roots/test-root/tabs.inc new file mode 100644 index 0000000..20b5182 --- /dev/null +++ b/tests/roots/test-root/tabs.inc @@ -0,0 +1,5 @@ +Tabs include file test +---------------------- + +The next line has a tab: +-| |- diff --git a/tests/roots/test-root/test.inc b/tests/roots/test-root/test.inc new file mode 100644 index 0000000..4773390 --- /dev/null +++ b/tests/roots/test-root/test.inc @@ -0,0 +1,3 @@ +.. This file is included from subdir/includes.txt. + +This is an include file. diff --git a/tests/roots/test-root/wrongenc.inc b/tests/roots/test-root/wrongenc.inc new file mode 100644 index 0000000..700f613 --- /dev/null +++ b/tests/roots/test-root/wrongenc.inc @@ -0,0 +1,3 @@ +This file is encoded in latin-1 but at first read as utf-8. + +Max Strau a in Mnchen eine Leberkssemmel. diff --git a/tests/roots/test-search/conf.py b/tests/roots/test-search/conf.py new file mode 100644 index 0000000..8613f5f --- /dev/null +++ b/tests/roots/test-search/conf.py @@ -0,0 +1,2 @@ +exclude_patterns = ['_build'] +html_search_language = 'en' diff --git a/tests/roots/test-search/index.rst b/tests/roots/test-search/index.rst new file mode 100644 index 0000000..fc2298b --- /dev/null +++ b/tests/roots/test-search/index.rst @@ -0,0 +1,30 @@ +meta keywords +============= + +.. meta:: + :keywords lang=en: findthiskey, thistoo, notgerman + :keywords: thisonetoo + :keywords lang=de: onlygerman, onlytoogerman + :description: thisnoteither + +Stemmer +======= + +bat +findthisstemmedkey + +textinheading + +International + +.. toctree:: + + tocitem + +.. raw:: html + + <span class="raw">rawword"</span> + +.. 
raw:: latex + + latex_keyword diff --git a/tests/roots/test-search/nosearch.rst b/tests/roots/test-search/nosearch.rst new file mode 100644 index 0000000..4f79575 --- /dev/null +++ b/tests/roots/test-search/nosearch.rst @@ -0,0 +1,7 @@ +:nosearch: + +nosearch +======== + +bat +latex diff --git a/tests/roots/test-search/tocitem.rst b/tests/roots/test-search/tocitem.rst new file mode 100644 index 0000000..98a1dc7 --- /dev/null +++ b/tests/roots/test-search/tocitem.rst @@ -0,0 +1,17 @@ +heading 1 +========= + +lorem ipsum + +bat + +textinheading +============= + +lorem ipsum + +可以查看 FAQ 模块中 Chinesetest 部分 + +模块中 CAS service部分 + +可以Chinesetesttwo查看 diff --git a/tests/roots/test-smartquotes/conf.py b/tests/roots/test-smartquotes/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-smartquotes/conf.py diff --git a/tests/roots/test-smartquotes/index.rst b/tests/roots/test-smartquotes/index.rst new file mode 100644 index 0000000..7dfd01a --- /dev/null +++ b/tests/roots/test-smartquotes/index.rst @@ -0,0 +1,8 @@ +test-smartquotes +================ + +-- "Sphinx" is a tool that makes it easy ... + +.. toctree:: + + literals diff --git a/tests/roots/test-smartquotes/literals.rst b/tests/roots/test-smartquotes/literals.rst new file mode 100644 index 0000000..ed77c80 --- /dev/null +++ b/tests/roots/test-smartquotes/literals.rst @@ -0,0 +1,12 @@ +literals +======== + +.. role:: python(code) + :language: python +.. default-role:: python + +Standard :code:`code role with 'quotes'` + +This is a Python :python:`{'code': 'role', 'with': 'quotes'}`. + +This is a ``literal with 'quotes'`` diff --git a/tests/roots/test-stylesheets/_templates/layout.html b/tests/roots/test-stylesheets/_templates/layout.html new file mode 100644 index 0000000..d048fe4 --- /dev/null +++ b/tests/roots/test-stylesheets/_templates/layout.html @@ -0,0 +1,8 @@ +{% extends "!layout.html" %} +{%- block css %} + {{ super() }} + <link rel="stylesheet" href="_static/more_persistent.css" type="text/css" /> + <link rel="stylesheet" href="_static/more_default.css" type="text/css" title="Default" /> + <link rel="alternate stylesheet" href="_static/more_alternate1.css" type="text/css" title="Alternate" /> + <link rel="alternate stylesheet" href="_static/more_alternate2.css" type="text/css" /> +{%- endblock %} diff --git a/tests/roots/test-stylesheets/conf.py b/tests/roots/test-stylesheets/conf.py new file mode 100644 index 0000000..fa37130 --- /dev/null +++ b/tests/roots/test-stylesheets/conf.py @@ -0,0 +1,9 @@ +html_theme = 'classic' +templates_path = ['_templates'] + + +def setup(app): + app.add_css_file('persistent.css') + app.add_css_file('default.css', title="Default") + app.add_css_file('alternate1.css', title="Alternate", rel="alternate stylesheet") + app.add_css_file('alternate2.css', rel="alternate stylesheet") diff --git a/tests/roots/test-stylesheets/index.rst b/tests/roots/test-stylesheets/index.rst new file mode 100644 index 0000000..c5c5766 --- /dev/null +++ b/tests/roots/test-stylesheets/index.rst @@ -0,0 +1,4 @@ +test-stylesheets +================ + +Lorem ipsum dolor diff --git a/tests/roots/test-templating/_templates/autosummary/class.rst b/tests/roots/test-templating/_templates/autosummary/class.rst new file mode 100644 index 0000000..6f50564 --- /dev/null +++ b/tests/roots/test-templating/_templates/autosummary/class.rst @@ -0,0 +1,9 @@ +{% extends "!autosummary/class.rst" %} + +{% block methods %} + + .. 
note:: autosummary/class.rst method block overloading + {{ sentence }} + + {{ super() }} +{% endblock %} diff --git a/tests/roots/test-templating/_templates/layout.html b/tests/roots/test-templating/_templates/layout.html new file mode 100644 index 0000000..f836c77 --- /dev/null +++ b/tests/roots/test-templating/_templates/layout.html @@ -0,0 +1,6 @@ +{% extends "!layout.html" %} + +{% block extrahead %} +<!-- layout overloading --> +{{ super() }} +{% endblock %} diff --git a/tests/roots/test-templating/autosummary_templating.txt b/tests/roots/test-templating/autosummary_templating.txt new file mode 100644 index 0000000..6b396a3 --- /dev/null +++ b/tests/roots/test-templating/autosummary_templating.txt @@ -0,0 +1,7 @@ +Autosummary templating test +=========================== + +.. autosummary:: + :toctree: generated + + sphinx.application.TemplateBridge diff --git a/tests/roots/test-templating/conf.py b/tests/roots/test-templating/conf.py new file mode 100644 index 0000000..e03eaf1 --- /dev/null +++ b/tests/roots/test-templating/conf.py @@ -0,0 +1,9 @@ +project = 'Sphinx templating <Tests>' +source_suffix = '.txt' +keep_warnings = True +templates_path = ['_templates'] +release = version = '2013.120' +exclude_patterns = ['_build'] + +extensions = ['sphinx.ext.autosummary'] +autosummary_generate = ['autosummary_templating'] diff --git a/tests/roots/test-templating/index.txt b/tests/roots/test-templating/index.txt new file mode 100644 index 0000000..04a40e2 --- /dev/null +++ b/tests/roots/test-templating/index.txt @@ -0,0 +1,7 @@ +Welcome to Sphinx Tests's documentation! +======================================== + +.. toctree:: + + autosummary_templating + diff --git a/tests/roots/test-theming/child.zip b/tests/roots/test-theming/child.zip Binary files differnew file mode 100644 index 0000000..b4a6a56 --- /dev/null +++ b/tests/roots/test-theming/child.zip diff --git a/tests/roots/test-theming/conf.py b/tests/roots/test-theming/conf.py new file mode 100644 index 0000000..0db7cf0 --- /dev/null +++ b/tests/roots/test-theming/conf.py @@ -0,0 +1,3 @@ +html_theme = 'test-theme' +html_theme_path = ['.', 'test_theme'] +exclude_patterns = ['_build'] diff --git a/tests/roots/test-theming/index.rst b/tests/roots/test-theming/index.rst new file mode 100644 index 0000000..214dcd7 --- /dev/null +++ b/tests/roots/test-theming/index.rst @@ -0,0 +1,5 @@ +======= +Theming +======= + + diff --git a/tests/roots/test-theming/parent.zip b/tests/roots/test-theming/parent.zip Binary files differnew file mode 100644 index 0000000..8a246ed --- /dev/null +++ b/tests/roots/test-theming/parent.zip diff --git a/tests/roots/test-theming/test_theme/__init__.py b/tests/roots/test-theming/test_theme/__init__.py new file mode 100644 index 0000000..13bdc4b --- /dev/null +++ b/tests/roots/test-theming/test_theme/__init__.py @@ -0,0 +1,5 @@ +import os + + +def get_path(): + return os.path.dirname(os.path.abspath(__file__)) diff --git a/tests/roots/test-theming/test_theme/staticfiles/layout.html b/tests/roots/test-theming/test_theme/staticfiles/layout.html new file mode 100644 index 0000000..81372be --- /dev/null +++ b/tests/roots/test-theming/test_theme/staticfiles/layout.html @@ -0,0 +1,5 @@ +{% extends "basic/layout.html" %} +{% block extrahead %} +<meta name="testopt" content="{{ theme_testopt }}" /> +{{ super() }} +{% endblock %} diff --git a/tests/roots/test-theming/test_theme/staticfiles/static/staticimg.png b/tests/roots/test-theming/test_theme/staticfiles/static/staticimg.png Binary files differnew file mode 100644 index 
0000000..fda6cd2 --- /dev/null +++ b/tests/roots/test-theming/test_theme/staticfiles/static/staticimg.png diff --git a/tests/roots/test-theming/test_theme/staticfiles/static/statictmpl.html_t b/tests/roots/test-theming/test_theme/staticfiles/static/statictmpl.html_t new file mode 100644 index 0000000..4ab292b --- /dev/null +++ b/tests/roots/test-theming/test_theme/staticfiles/static/statictmpl.html_t @@ -0,0 +1,2 @@ +<!-- testing static templates --> +<html><project>{{ project|e }}</project></html> diff --git a/tests/roots/test-theming/test_theme/staticfiles/theme.conf b/tests/roots/test-theming/test_theme/staticfiles/theme.conf new file mode 100644 index 0000000..a877673 --- /dev/null +++ b/tests/roots/test-theming/test_theme/staticfiles/theme.conf @@ -0,0 +1,7 @@ +[theme] +inherit = basic +stylesheet = default.css +pygments_style = emacs + +[options] +testopt = optdefault diff --git a/tests/roots/test-theming/test_theme/test-theme/theme.conf b/tests/roots/test-theming/test_theme/test-theme/theme.conf new file mode 100644 index 0000000..2ad2c33 --- /dev/null +++ b/tests/roots/test-theming/test_theme/test-theme/theme.conf @@ -0,0 +1,4 @@ +[theme] +inherit = classic +sidebars = globaltoc.html, searchbox.html +pygments_dark_style = monokai diff --git a/tests/roots/test-theming/ziptheme.zip b/tests/roots/test-theming/ziptheme.zip Binary files differnew file mode 100644 index 0000000..8a246ed --- /dev/null +++ b/tests/roots/test-theming/ziptheme.zip diff --git a/tests/roots/test-tocdepth/bar.rst b/tests/roots/test-tocdepth/bar.rst new file mode 100644 index 0000000..d70dec9 --- /dev/null +++ b/tests/roots/test-tocdepth/bar.rst @@ -0,0 +1,27 @@ +:tocdepth: 2 + +=== +Bar +=== + +should be 2 + +Bar A +===== + +should be 2.1 + +.. toctree:: + + baz + +Bar B +===== + +should be 2.2 + +Bar B1 +------ + +should be 2.2.1 + diff --git a/tests/roots/test-tocdepth/baz.rst b/tests/roots/test-tocdepth/baz.rst new file mode 100644 index 0000000..b07fa05 --- /dev/null +++ b/tests/roots/test-tocdepth/baz.rst @@ -0,0 +1,5 @@ +Baz A +----- + +should be 2.1.1 + diff --git a/tests/roots/test-tocdepth/conf.py b/tests/roots/test-tocdepth/conf.py new file mode 100644 index 0000000..46bb290 --- /dev/null +++ b/tests/roots/test-tocdepth/conf.py @@ -0,0 +1,2 @@ +html_theme = 'classic' +exclude_patterns = ['_build'] diff --git a/tests/roots/test-tocdepth/foo.rst b/tests/roots/test-tocdepth/foo.rst new file mode 100644 index 0000000..61fd539 --- /dev/null +++ b/tests/roots/test-tocdepth/foo.rst @@ -0,0 +1,26 @@ +=== +Foo +=== + +should be 1 + +Foo A +===== + +should be 1.1 + +Foo A1 +------ + +should be 1.1.1 + +Foo B +===== + +should be 1.2 + +Foo B1 +------ + +should be 1.2.1 + diff --git a/tests/roots/test-tocdepth/index.rst b/tests/roots/test-tocdepth/index.rst new file mode 100644 index 0000000..0b651d4 --- /dev/null +++ b/tests/roots/test-tocdepth/index.rst @@ -0,0 +1,8 @@ +test-tocdepth +============= + +.. toctree:: + :numbered: + + foo + bar diff --git a/tests/roots/test-toctree-domain-objects/conf.py b/tests/roots/test-toctree-domain-objects/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-toctree-domain-objects/conf.py diff --git a/tests/roots/test-toctree-domain-objects/domains.rst b/tests/roots/test-toctree-domain-objects/domains.rst new file mode 100644 index 0000000..61467cf --- /dev/null +++ b/tests/roots/test-toctree-domain-objects/domains.rst @@ -0,0 +1,39 @@ +test-domain-objects +=================== + +.. py:module:: hello + +.. 
py:function:: world() -> str + + Prints "Hello, World!" to stdout + +.. py:class:: HelloWorldPrinter + + Controls printing of hello world + + .. py:method:: set_language() + + Sets the language of the HelloWorldPrinter instance + + .. py:attribute:: output_count + + Count of outputs of "Hello, World!" + + .. py:method:: print_normal() + :async: + :classmethod: + + Prints the normal form of "Hello, World!" + + .. py:method:: print() + + Prints "Hello, World!", including in the chosen language + +.. py:function:: exit() + :module: sys + + Quits the interpreter + +.. js:function:: fetch(resource) + + Fetches the given resource, returns a Promise
\ No newline at end of file diff --git a/tests/roots/test-toctree-domain-objects/index.rst b/tests/roots/test-toctree-domain-objects/index.rst new file mode 100644 index 0000000..77ee010 --- /dev/null +++ b/tests/roots/test-toctree-domain-objects/index.rst @@ -0,0 +1,6 @@ +.. toctree:: + :numbered: + :caption: Table of Contents + :name: mastertoc + + domains diff --git a/tests/roots/test-toctree-duplicated/conf.py b/tests/roots/test-toctree-duplicated/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-toctree-duplicated/conf.py diff --git a/tests/roots/test-toctree-duplicated/foo.rst b/tests/roots/test-toctree-duplicated/foo.rst new file mode 100644 index 0000000..f23d4ce --- /dev/null +++ b/tests/roots/test-toctree-duplicated/foo.rst @@ -0,0 +1,2 @@ +foo +=== diff --git a/tests/roots/test-toctree-duplicated/index.rst b/tests/roots/test-toctree-duplicated/index.rst new file mode 100644 index 0000000..38a8c44 --- /dev/null +++ b/tests/roots/test-toctree-duplicated/index.rst @@ -0,0 +1,7 @@ +test-toctree-duplicated +======================= + +.. toctree:: + + foo + foo diff --git a/tests/roots/test-toctree-empty/_templates/localtoc.html b/tests/roots/test-toctree-empty/_templates/localtoc.html new file mode 100644 index 0000000..0fd9139 --- /dev/null +++ b/tests/roots/test-toctree-empty/_templates/localtoc.html @@ -0,0 +1,2 @@ +{# This will call toctree unconditionally, whether there is a local or global toc #} +{{ toctree() }} diff --git a/tests/roots/test-toctree-empty/conf.py b/tests/roots/test-toctree-empty/conf.py new file mode 100644 index 0000000..bda61da --- /dev/null +++ b/tests/roots/test-toctree-empty/conf.py @@ -0,0 +1,2 @@ +exclude_patterns = ['_build'] +templates_path = ['_templates'] diff --git a/tests/roots/test-toctree-empty/index.rst b/tests/roots/test-toctree-empty/index.rst new file mode 100644 index 0000000..0b97a60 --- /dev/null +++ b/tests/roots/test-toctree-empty/index.rst @@ -0,0 +1,4 @@ +test-toctree-empty +================== + +.. toctree:: diff --git a/tests/roots/test-toctree-glob/bar/bar_1.rst b/tests/roots/test-toctree-glob/bar/bar_1.rst new file mode 100644 index 0000000..6229a15 --- /dev/null +++ b/tests/roots/test-toctree-glob/bar/bar_1.rst @@ -0,0 +1,4 @@ +Bar-1 +===== + +bar diff --git a/tests/roots/test-toctree-glob/bar/bar_2.rst b/tests/roots/test-toctree-glob/bar/bar_2.rst new file mode 100644 index 0000000..ed78621 --- /dev/null +++ b/tests/roots/test-toctree-glob/bar/bar_2.rst @@ -0,0 +1,4 @@ +Bar-2 +===== + +bar diff --git a/tests/roots/test-toctree-glob/bar/bar_3.rst b/tests/roots/test-toctree-glob/bar/bar_3.rst new file mode 100644 index 0000000..93c58d4 --- /dev/null +++ b/tests/roots/test-toctree-glob/bar/bar_3.rst @@ -0,0 +1,4 @@ +Bar-3 +===== + +bar diff --git a/tests/roots/test-toctree-glob/bar/bar_4/index.rst b/tests/roots/test-toctree-glob/bar/bar_4/index.rst new file mode 100644 index 0000000..4fae623 --- /dev/null +++ b/tests/roots/test-toctree-glob/bar/bar_4/index.rst @@ -0,0 +1,4 @@ +Bar-4 +===== + +bar diff --git a/tests/roots/test-toctree-glob/bar/index.rst b/tests/roots/test-toctree-glob/bar/index.rst new file mode 100644 index 0000000..74a9ba9 --- /dev/null +++ b/tests/roots/test-toctree-glob/bar/index.rst @@ -0,0 +1,8 @@ +Bar +=== + +.. 
toctree:: + :glob: + + * + bar_4/index diff --git a/tests/roots/test-toctree-glob/baz.rst b/tests/roots/test-toctree-glob/baz.rst new file mode 100644 index 0000000..2c1bbbc --- /dev/null +++ b/tests/roots/test-toctree-glob/baz.rst @@ -0,0 +1,4 @@ +Baz +=== + +baz diff --git a/tests/roots/test-toctree-glob/conf.py b/tests/roots/test-toctree-glob/conf.py new file mode 100644 index 0000000..a45d22e --- /dev/null +++ b/tests/roots/test-toctree-glob/conf.py @@ -0,0 +1 @@ +exclude_patterns = ['_build'] diff --git a/tests/roots/test-toctree-glob/foo.rst b/tests/roots/test-toctree-glob/foo.rst new file mode 100644 index 0000000..83f9522 --- /dev/null +++ b/tests/roots/test-toctree-glob/foo.rst @@ -0,0 +1,4 @@ +Foo +=== + +foo diff --git a/tests/roots/test-toctree-glob/index.rst b/tests/roots/test-toctree-glob/index.rst new file mode 100644 index 0000000..4ed6bb4 --- /dev/null +++ b/tests/roots/test-toctree-glob/index.rst @@ -0,0 +1,28 @@ +test-toctree-glob +================= + +normal order +------------ + +.. toctree:: + :glob: + + foo + bar/index + bar/* + baz + qux/index + hyperref <https://sphinx-doc.org/?q=sphinx> + +reversed order +-------------- + +.. toctree:: + :glob: + :reversed: + + foo + bar/index + bar/* + baz + qux/index diff --git a/tests/roots/test-toctree-glob/quux.rst b/tests/roots/test-toctree-glob/quux.rst new file mode 100644 index 0000000..340389d --- /dev/null +++ b/tests/roots/test-toctree-glob/quux.rst @@ -0,0 +1,4 @@ +Quux +==== + +quux diff --git a/tests/roots/test-toctree-glob/qux/index.rst b/tests/roots/test-toctree-glob/qux/index.rst new file mode 100644 index 0000000..ad0bee5 --- /dev/null +++ b/tests/roots/test-toctree-glob/qux/index.rst @@ -0,0 +1,8 @@ +Qux +=== + +.. toctree:: + :glob: + :hidden: + + * diff --git a/tests/roots/test-toctree-glob/qux/qux_1.rst b/tests/roots/test-toctree-glob/qux/qux_1.rst new file mode 100644 index 0000000..bac227b --- /dev/null +++ b/tests/roots/test-toctree-glob/qux/qux_1.rst @@ -0,0 +1,4 @@ +Qux-1 +===== + +qux diff --git a/tests/roots/test-toctree-glob/qux/qux_2.rst b/tests/roots/test-toctree-glob/qux/qux_2.rst new file mode 100644 index 0000000..bac227b --- /dev/null +++ b/tests/roots/test-toctree-glob/qux/qux_2.rst @@ -0,0 +1,4 @@ +Qux-1 +===== + +qux diff --git a/tests/roots/test-toctree-index/conf.py b/tests/roots/test-toctree-index/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-toctree-index/conf.py diff --git a/tests/roots/test-toctree-index/foo.rst b/tests/roots/test-toctree-index/foo.rst new file mode 100644 index 0000000..fc2c716 --- /dev/null +++ b/tests/roots/test-toctree-index/foo.rst @@ -0,0 +1,8 @@ +foo +=== + +:index:`word` + +.. py:module:: pymodule + +.. py:function:: Timer.repeat(repeat=3, number=1000000) diff --git a/tests/roots/test-toctree-index/index.rst b/tests/roots/test-toctree-index/index.rst new file mode 100644 index 0000000..eb211c5 --- /dev/null +++ b/tests/roots/test-toctree-index/index.rst @@ -0,0 +1,15 @@ +test-toctree-index +================== + +.. toctree:: + + foo + + +.. toctree:: + :caption: Indices + + genindex + modindex + search + diff --git a/tests/roots/test-toctree-maxdepth/bar.rst b/tests/roots/test-toctree-maxdepth/bar.rst new file mode 100644 index 0000000..d70dec9 --- /dev/null +++ b/tests/roots/test-toctree-maxdepth/bar.rst @@ -0,0 +1,27 @@ +:tocdepth: 2 + +=== +Bar +=== + +should be 2 + +Bar A +===== + +should be 2.1 + +.. 
toctree:: + + baz + +Bar B +===== + +should be 2.2 + +Bar B1 +------ + +should be 2.2.1 + diff --git a/tests/roots/test-toctree-maxdepth/baz.rst b/tests/roots/test-toctree-maxdepth/baz.rst new file mode 100644 index 0000000..b07fa05 --- /dev/null +++ b/tests/roots/test-toctree-maxdepth/baz.rst @@ -0,0 +1,5 @@ +Baz A +----- + +should be 2.1.1 + diff --git a/tests/roots/test-toctree-maxdepth/conf.py b/tests/roots/test-toctree-maxdepth/conf.py new file mode 100644 index 0000000..a45d22e --- /dev/null +++ b/tests/roots/test-toctree-maxdepth/conf.py @@ -0,0 +1 @@ +exclude_patterns = ['_build'] diff --git a/tests/roots/test-toctree-maxdepth/foo.rst b/tests/roots/test-toctree-maxdepth/foo.rst new file mode 100644 index 0000000..61fd539 --- /dev/null +++ b/tests/roots/test-toctree-maxdepth/foo.rst @@ -0,0 +1,26 @@ +=== +Foo +=== + +should be 1 + +Foo A +===== + +should be 1.1 + +Foo A1 +------ + +should be 1.1.1 + +Foo B +===== + +should be 1.2 + +Foo B1 +------ + +should be 1.2.1 + diff --git a/tests/roots/test-toctree-maxdepth/index.rst b/tests/roots/test-toctree-maxdepth/index.rst new file mode 100644 index 0000000..30dc61c --- /dev/null +++ b/tests/roots/test-toctree-maxdepth/index.rst @@ -0,0 +1,9 @@ +test-toctree-max-depth +====================== + +.. toctree:: + :numbered: + :maxdepth: 2 + + foo + bar diff --git a/tests/roots/test-toctree-maxdepth/qux.rst b/tests/roots/test-toctree-maxdepth/qux.rst new file mode 100644 index 0000000..35e9ac1 --- /dev/null +++ b/tests/roots/test-toctree-maxdepth/qux.rst @@ -0,0 +1,9 @@ +test-toctree-max-depth +====================== + +.. toctree:: + :numbered: + :maxdepth: 4 + + foo + bar diff --git a/tests/roots/test-toctree/bar.rst b/tests/roots/test-toctree/bar.rst new file mode 100644 index 0000000..1cccd3c --- /dev/null +++ b/tests/roots/test-toctree/bar.rst @@ -0,0 +1,2 @@ +bar +=== diff --git a/tests/roots/test-toctree/baz.rst b/tests/roots/test-toctree/baz.rst new file mode 100644 index 0000000..52e2e72 --- /dev/null +++ b/tests/roots/test-toctree/baz.rst @@ -0,0 +1,2 @@ +baz +=== diff --git a/tests/roots/test-toctree/conf.py b/tests/roots/test-toctree/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-toctree/conf.py diff --git a/tests/roots/test-toctree/foo.rst b/tests/roots/test-toctree/foo.rst new file mode 100644 index 0000000..49f4d4b --- /dev/null +++ b/tests/roots/test-toctree/foo.rst @@ -0,0 +1,15 @@ +foo +=== + +.. toctree:: + + quux + +foo.1 +----- + +foo.1-1 +^^^^^^^ + +foo.2 +----- diff --git a/tests/roots/test-toctree/index.rst b/tests/roots/test-toctree/index.rst new file mode 100644 index 0000000..adf1b84 --- /dev/null +++ b/tests/roots/test-toctree/index.rst @@ -0,0 +1,55 @@ +.. Sphinx Tests documentation master file, created by sphinx-quickstart on Wed Jun 4 23:49:58 2008. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to Sphinx Tests's documentation! +======================================== + +Contents: + +.. toctree:: + :maxdepth: 2 + :numbered: + :caption: Table of Contents + :name: mastertoc + + foo + bar + http://sphinx-doc.org/ + self + +.. only:: html + + Section for HTML + ---------------- + + .. toctree:: + + baz + +---------- +subsection +---------- + +subsubsection +------------- + +Test for issue #1157 +==================== + +This used to crash: + +.. toctree:: + +.. 
toctree:: + :hidden: + + Latest reference <http://sphinx-doc.org/latest/> + Python <http://python.org/> + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/tests/roots/test-toctree/quux.rst b/tests/roots/test-toctree/quux.rst new file mode 100644 index 0000000..07dd0a0 --- /dev/null +++ b/tests/roots/test-toctree/quux.rst @@ -0,0 +1,2 @@ +quux +==== diff --git a/tests/roots/test-toctree/qux.rst b/tests/roots/test-toctree/qux.rst new file mode 100644 index 0000000..26176b9 --- /dev/null +++ b/tests/roots/test-toctree/qux.rst @@ -0,0 +1 @@ +qux.rst has no section title diff --git a/tests/roots/test-toctree/tocdepth.rst b/tests/roots/test-toctree/tocdepth.rst new file mode 100644 index 0000000..1069b4c --- /dev/null +++ b/tests/roots/test-toctree/tocdepth.rst @@ -0,0 +1,15 @@ +:tocdepth: 2 + +======= +level 1 +======= + +level 2 +======= + +------- +level 3 +------- + +level 4 +------- diff --git a/tests/roots/test-transforms-post_transforms-keyboard/conf.py b/tests/roots/test-transforms-post_transforms-keyboard/conf.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tests/roots/test-transforms-post_transforms-keyboard/conf.py diff --git a/tests/roots/test-transforms-post_transforms-keyboard/index.rst b/tests/roots/test-transforms-post_transforms-keyboard/index.rst new file mode 100644 index 0000000..2177578 --- /dev/null +++ b/tests/roots/test-transforms-post_transforms-keyboard/index.rst @@ -0,0 +1,4 @@ +Regression test for issue 10495 +=============================== + +:kbd:`spanish - inquisition` diff --git a/tests/roots/test-transforms-post_transforms-missing-reference/conf.py b/tests/roots/test-transforms-post_transforms-missing-reference/conf.py new file mode 100644 index 0000000..2db221c --- /dev/null +++ b/tests/roots/test-transforms-post_transforms-missing-reference/conf.py @@ -0,0 +1 @@ +nitpicky = True diff --git a/tests/roots/test-transforms-post_transforms-missing-reference/index.rst b/tests/roots/test-transforms-post_transforms-missing-reference/index.rst new file mode 100644 index 0000000..7180978 --- /dev/null +++ b/tests/roots/test-transforms-post_transforms-missing-reference/index.rst @@ -0,0 +1,5 @@ +transforms-post_transforms-missing-reference +============================================ + +:class:`io.StringIO` + diff --git a/tests/roots/test-trim_doctest_flags/conf.py b/tests/roots/test-trim_doctest_flags/conf.py new file mode 100644 index 0000000..77c7255 --- /dev/null +++ b/tests/roots/test-trim_doctest_flags/conf.py @@ -0,0 +1 @@ +extensions = ['sphinx.ext.doctest'] diff --git a/tests/roots/test-trim_doctest_flags/index.rst b/tests/roots/test-trim_doctest_flags/index.rst new file mode 100644 index 0000000..d63251a --- /dev/null +++ b/tests/roots/test-trim_doctest_flags/index.rst @@ -0,0 +1,40 @@ +test-trim_doctest_flags +======================= + +.. code-block:: pycon + + >>> datetime.date.now() # doctest: +FOO + datetime.date(2008, 1, 1) + +.. code-block:: none + + >>> datetime.date.now() # doctest: +BAR + datetime.date(2008, 1, 1) + +.. code-block:: guess + + # vim: set filetype=pycon + >>> datetime.date.now() # doctest: +BAZ + datetime.date(2008, 1, 1) + +.. testcode:: + + >>> datetime.date.now() # doctest: +QUX + datetime.date(2008, 1, 1) + +.. doctest:: + + >>> datetime.date.now() # doctest: +QUUX + datetime.date(2008, 1, 1) + +.. doctest:: + :trim-doctest-flags: + + >>> datetime.date.now() # doctest: +CORGE + datetime.date(2008, 1, 1) + +.. 
doctest:: + :no-trim-doctest-flags: + + >>> datetime.date.now() # doctest: +GRAULT + datetime.date(2008, 1, 1) diff --git a/tests/roots/test-versioning/added.txt b/tests/roots/test-versioning/added.txt new file mode 100644 index 0000000..22a7073 --- /dev/null +++ b/tests/roots/test-versioning/added.txt @@ -0,0 +1,20 @@ +Versioning test text +==================== + +So the thing is I need some kind of text - not the lorem ipsum stuff, that +doesn't work out that well - to test :mod:`sphinx.versioning`. I couldn't find +a good text for that under public domain so I thought the easiest solution is +to write one by myself. It's not really interesting, in fact it is *really* +boring. + +Anyway I need more than one paragraph, at least three for the original +document, I think, and another one for two different ones. + +So the previous paragraph was a bit short because I don't want to test this +only on long paragraphs, I hope it was short enough to cover most stuff. +Anyway I see this lacks ``some markup`` so I have to add a **little** bit. + +Woho another paragraph, if this test fails we really have a problem because +this means the algorithm itself fails and not the diffing algorithm which is +pretty much doomed anyway as it probably fails for some kind of language +respecting certain nodes anyway but we can't work around that anyway. diff --git a/tests/roots/test-versioning/conf.py b/tests/roots/test-versioning/conf.py new file mode 100644 index 0000000..6344cb0 --- /dev/null +++ b/tests/roots/test-versioning/conf.py @@ -0,0 +1,3 @@ +project = 'versioning test root' +source_suffix = '.txt' +exclude_patterns = ['_build'] diff --git a/tests/roots/test-versioning/deleted.txt b/tests/roots/test-versioning/deleted.txt new file mode 100644 index 0000000..a1a9c4c --- /dev/null +++ b/tests/roots/test-versioning/deleted.txt @@ -0,0 +1,12 @@ +Versioning test text +==================== + +So the thing is I need some kind of text - not the lorem ipsum stuff, that +doesn't work out that well - to test :mod:`sphinx.versioning`. I couldn't find +a good text for that under public domain so I thought the easiest solution is +to write one by myself. It's not really interesting, in fact it is *really* +boring. + +So the previous paragraph was a bit short because I don't want to test this +only on long paragraphs, I hope it was short enough to cover most stuff. +Anyway I see this lacks ``some markup`` so I have to add a **little** bit. diff --git a/tests/roots/test-versioning/deleted_end.txt b/tests/roots/test-versioning/deleted_end.txt new file mode 100644 index 0000000..f30e630 --- /dev/null +++ b/tests/roots/test-versioning/deleted_end.txt @@ -0,0 +1,11 @@ +Versioning test text +==================== + +So the thing is I need some kind of text - not the lorem ipsum stuff, that +doesn't work out that well - to test :mod:`sphinx.versioning`. I couldn't find +a good text for that under public domain so I thought the easiest solution is +to write one by myself. It's not really interesting, in fact it is *really* +boring. + +Anyway I need more than one paragraph, at least three for the original +document, I think, and another one for two different ones. diff --git a/tests/roots/test-versioning/index.txt b/tests/roots/test-versioning/index.txt new file mode 100644 index 0000000..9d098f7 --- /dev/null +++ b/tests/roots/test-versioning/index.txt @@ -0,0 +1,13 @@ +Versioning Stuff +================ + +.. 
toctree:: + + original + added + insert + deleted + deleted_end + modified + insert_beginning + insert_similar diff --git a/tests/roots/test-versioning/insert.txt b/tests/roots/test-versioning/insert.txt new file mode 100644 index 0000000..1c157cc --- /dev/null +++ b/tests/roots/test-versioning/insert.txt @@ -0,0 +1,18 @@ +Versioning test text +==================== + +So the thing is I need some kind of text - not the lorem ipsum stuff, that +doesn't work out that well - to test :mod:`sphinx.versioning`. I couldn't find +a good text for that under public domain so I thought the easiest solution is +to write one by myself. It's not really interesting, in fact it is *really* +boring. + +So this paragraph is just something I inserted in this document to test if our +algorithm notices that this paragraph is not just a changed version. + +Anyway I need more than one paragraph, at least three for the original +document, I think, and another one for two different ones. + +So the previous paragraph was a bit short because I don't want to test this +only on long paragraphs, I hope it was short enough to cover most stuff. +Anyway I see this lacks ``some markup`` so I have to add a **little** bit. diff --git a/tests/roots/test-versioning/insert_beginning.txt b/tests/roots/test-versioning/insert_beginning.txt new file mode 100644 index 0000000..57102a7 --- /dev/null +++ b/tests/roots/test-versioning/insert_beginning.txt @@ -0,0 +1,18 @@ +Versioning test text +==================== + +Apperantly inserting a paragraph at the beginning of a document caused +problems earlier so this document should be used to test that. + +So the thing is I need some kind of text - not the lorem ipsum stuff, that +doesn't work out that well - to test :mod:`sphinx.versioning`. I couldn't find +a good text for that under public domain so I thought the easiest solution is +to write one by myself. It's not really interesting, in fact it is *really* +boring. + +Anyway I need more than one paragraph, at least three for the original +document, I think, and another one for two different ones. + +So the previous paragraph was a bit short because I don't want to test this +only on long paragraphs, I hope it was short enough to cover most stuff. +Anyway I see this lacks ``some markup`` so I have to add a **little** bit. diff --git a/tests/roots/test-versioning/insert_similar.txt b/tests/roots/test-versioning/insert_similar.txt new file mode 100644 index 0000000..ee9b530 --- /dev/null +++ b/tests/roots/test-versioning/insert_similar.txt @@ -0,0 +1,17 @@ +Versioning test text +==================== + +So the thing is I need some kind of text - not the lorem ipsum stuff, that +doesn't work out that well - to test :mod:`sphinx.versioning`. I couldn't find +a good text for that under public domain so I thought the easiest solution is +to write one by myself. It's not really interesting, in fact it is *really* +boring. + +Anyway I need more + +Anyway I need more than one paragraph, at least three for the original +document, I think, and another one for two different ones. + +So the previous paragraph was a bit short because I don't want to test this +only on long paragraphs, I hope it was short enough to cover most stuff. +Anyway I see this lacks ``some markup`` so I have to add a **little** bit. 
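[Editor's note] The versioning fixtures above (original, added, insert, deleted, modified, insert_beginning, insert_similar) exist to exercise Sphinx's paragraph-matching machinery. A minimal sketch (not part of this commit) of how :mod:`sphinx.versioning` is typically driven against texts like these; the inline strings below are stand-ins for the fixture files, and ``is_paragraph`` mirrors the condition used elsewhere in the test suite::

    from docutils import nodes
    from docutils.core import publish_doctree

    from sphinx.versioning import add_uids, merge_doctrees


    def is_paragraph(node):
        # condition: only consider plain paragraph nodes when matching versions
        return node.__class__ is nodes.paragraph


    # stand-ins for e.g. original.txt and insert.txt from the test root above
    old = publish_doctree("First paragraph.\n\nSecond paragraph.\n")
    new = publish_doctree("First paragraph.\n\nInserted paragraph.\n\nSecond paragraph.\n")

    # assign a UID to every paragraph of the old document ...
    old_uids = [node.uid for node in add_uids(old, is_paragraph)]
    # ... then carry matching UIDs over to the new document; merge_doctrees()
    # yields only the nodes it could not match against an old paragraph
    fresh = list(merge_doctrees(old, new, is_paragraph))
    print(len(old_uids), len(fresh))  # expected: two old paragraphs, one freshly inserted one
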
diff --git a/tests/roots/test-versioning/modified.txt b/tests/roots/test-versioning/modified.txt new file mode 100644 index 0000000..49cdad9 --- /dev/null +++ b/tests/roots/test-versioning/modified.txt @@ -0,0 +1,17 @@ +Versioning test text +==================== + +So the thing is I need some kind of text - not the lorem ipsum stuff, that +doesn't work out that well - to test :mod:`sphinx.versioning`. I couldn't find +a good text for that under public domain so I thought the easiest solution is +to write one by myself. Inserting something silly as a modification, btw. have +you seen the typo below?. It's not really interesting, in fact it is *really* +boring. + +Anyway I need more than one paragraph, at least three for the original +document, I think, and another one for two different ones. So this is a small +modification by adding something to this paragraph. + +So the previous paragraph was a bit short because I don't want to test this +only on long paragraphs, I hoep it was short enough to cover most stuff. +Anyway I see this lacks ``some markup`` so I have to add a **little** bit. diff --git a/tests/roots/test-versioning/original.txt b/tests/roots/test-versioning/original.txt new file mode 100644 index 0000000..b3fe060 --- /dev/null +++ b/tests/roots/test-versioning/original.txt @@ -0,0 +1,15 @@ +Versioning test text +==================== + +So the thing is I need some kind of text - not the lorem ipsum stuff, that +doesn't work out that well - to test :mod:`sphinx.versioning`. I couldn't find +a good text for that under public domain so I thought the easiest solution is +to write one by myself. It's not really interesting, in fact it is *really* +boring. + +Anyway I need more than one paragraph, at least three for the original +document, I think, and another one for two different ones. + +So the previous paragraph was a bit short because I don't want to test this +only on long paragraphs, I hope it was short enough to cover most stuff. +Anyway I see this lacks ``some markup`` so I have to add a **little** bit. diff --git a/tests/roots/test-warnings/autodoc_fodder.py b/tests/roots/test-warnings/autodoc_fodder.py new file mode 100644 index 0000000..59e4e21 --- /dev/null +++ b/tests/roots/test-warnings/autodoc_fodder.py @@ -0,0 +1,6 @@ +class MarkupError: + """ + .. note:: This is a docstring with a + small markup error which should have + correct location information. + """ diff --git a/tests/roots/test-warnings/conf.py b/tests/roots/test-warnings/conf.py new file mode 100644 index 0000000..25b8aba --- /dev/null +++ b/tests/roots/test-warnings/conf.py @@ -0,0 +1,6 @@ +import os +import sys + +sys.path.append(os.path.abspath('.')) + +extensions = ['sphinx.ext.autodoc'] diff --git a/tests/roots/test-warnings/index.rst b/tests/roots/test-warnings/index.rst new file mode 100644 index 0000000..ac52d90 --- /dev/null +++ b/tests/roots/test-warnings/index.rst @@ -0,0 +1,45 @@ +test-warnings +============= + +.. automodule:: autodoc_fodder + :no-index: + + .. autoclass:: MarkupError + +.. a non-existing image with direct filename +.. image:: foo.png + +.. a non-existing image with .* +.. image:: foo.* + +.. an SVG image (for HTML at least) +.. image:: svgimg.* + +.. should give a warning +.. literalinclude:: wrongenc.inc + :language: none + +.. a non-existing download + +Don't download :download:`this <nonexisting.png>`. + +.. Invalid index markup +.. index:: + single: + pair: + seealso: + +.. Invalid code-block +.. code-block:: c + + import sys + + sys.stdout.write('hello world!\n') + +.. 
unknown option + +This used to crash: :option:`&option` + +.. missing citation + +[missing]_ citation diff --git a/tests/roots/test-warnings/svgimg.pdf b/tests/roots/test-warnings/svgimg.pdf Binary files differnew file mode 100644 index 0000000..cacbd85 --- /dev/null +++ b/tests/roots/test-warnings/svgimg.pdf diff --git a/tests/roots/test-warnings/svgimg.svg b/tests/roots/test-warnings/svgimg.svg new file mode 100644 index 0000000..2bae0b9 --- /dev/null +++ b/tests/roots/test-warnings/svgimg.svg @@ -0,0 +1,4 @@ +<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="60" width="60"> + <circle cx="40" cy="40" r="24" style="stroke:#000000; fill:#ffffff"/> +</svg> diff --git a/tests/roots/test-warnings/undecodable.rst b/tests/roots/test-warnings/undecodable.rst new file mode 100644 index 0000000..a4cf5c3 --- /dev/null +++ b/tests/roots/test-warnings/undecodable.rst @@ -0,0 +1,3 @@ +:orphan: + +here: diff --git a/tests/roots/test-warnings/wrongenc.inc b/tests/roots/test-warnings/wrongenc.inc new file mode 100644 index 0000000..700f613 --- /dev/null +++ b/tests/roots/test-warnings/wrongenc.inc @@ -0,0 +1,3 @@ +This file is encoded in latin-1 but at first read as utf-8. + +Max Strau a in Mnchen eine Leberkssemmel. diff --git a/tests/test_addnodes.py b/tests/test_addnodes.py new file mode 100644 index 0000000..184a696 --- /dev/null +++ b/tests/test_addnodes.py @@ -0,0 +1,51 @@ +"""Test the non-trivial features in the :mod:`sphinx.addnodes` module.""" + +from __future__ import annotations + +import pytest + +from sphinx import addnodes + + +@pytest.fixture() +def sig_elements() -> set[type[addnodes.desc_sig_element]]: + """Fixture returning the current ``addnodes.SIG_ELEMENTS`` set.""" + original = addnodes.SIG_ELEMENTS.copy() # safe copy of the current nodes + yield {*addnodes.SIG_ELEMENTS} # temporary value to use during tests + addnodes.SIG_ELEMENTS = original # restore the previous value + + +def test_desc_sig_element_nodes(sig_elements): + """Test the registration of ``desc_sig_element`` subclasses.""" + + # expected desc_sig_* node classes (must be declared *after* reloading + # the module since otherwise the objects are not the correct ones) + EXPECTED_SIG_ELEMENTS = { + addnodes.desc_sig_space, + addnodes.desc_sig_name, + addnodes.desc_sig_operator, + addnodes.desc_sig_punctuation, + addnodes.desc_sig_keyword, + addnodes.desc_sig_keyword_type, + addnodes.desc_sig_literal_number, + addnodes.desc_sig_literal_string, + addnodes.desc_sig_literal_char, + } + + assert addnodes.SIG_ELEMENTS == EXPECTED_SIG_ELEMENTS + + # create a built-in custom desc_sig_element (added to SIG_ELEMENTS) + class BuiltInSigElementLikeNode(addnodes.desc_sig_element, _sig_element=True): + pass + + # create a custom desc_sig_element (implicitly not added to SIG_ELEMENTS) + class Custom1SigElementLikeNode(addnodes.desc_sig_element): + pass + + # create a custom desc_sig_element (explicitly not added to SIG_ELEMENTS) + class Custom2SigElementLikeNode(addnodes.desc_sig_element, _sig_element=False): + pass + + assert BuiltInSigElementLikeNode in addnodes.SIG_ELEMENTS + assert Custom1SigElementLikeNode not in addnodes.SIG_ELEMENTS + assert Custom2SigElementLikeNode not in addnodes.SIG_ELEMENTS diff --git a/tests/test_api_translator.py b/tests/test_api_translator.py new file mode 100644 index 0000000..9f2bd44 --- /dev/null +++ b/tests/test_api_translator.py @@ -0,0 +1,92 @@ +"""Test the Sphinx API for translator.""" + +import sys + 
+import pytest + + +@pytest.fixture(scope='module', autouse=True) +def _setup_module(rootdir): + p = rootdir / 'test-api-set-translator' + sys.path.insert(0, p) + yield + sys.path.remove(p) + + +@pytest.mark.sphinx('html') +def test_html_translator(app, status, warning): + # no set_translator() + translator_class = app.builder.get_translator_class() + assert translator_class + assert translator_class.__name__ == 'HTML5Translator' + + +@pytest.mark.sphinx('html', testroot='api-set-translator') +def test_html_with_set_translator_for_html_(app, status, warning): + # use set_translator() + translator_class = app.builder.get_translator_class() + assert translator_class + assert translator_class.__name__ == 'ConfHTMLTranslator' + + +@pytest.mark.sphinx('singlehtml', testroot='api-set-translator') +def test_singlehtml_set_translator_for_singlehtml(app, status, warning): + translator_class = app.builder.get_translator_class() + assert translator_class + assert translator_class.__name__ == 'ConfSingleHTMLTranslator' + + +@pytest.mark.sphinx('pickle', testroot='api-set-translator') +def test_pickle_set_translator_for_pickle(app, status, warning): + translator_class = app.builder.get_translator_class() + assert translator_class + assert translator_class.__name__ == 'ConfPickleTranslator' + + +@pytest.mark.sphinx('json', testroot='api-set-translator') +def test_json_set_translator_for_json(app, status, warning): + translator_class = app.builder.get_translator_class() + assert translator_class + assert translator_class.__name__ == 'ConfJsonTranslator' + + +@pytest.mark.sphinx('latex', testroot='api-set-translator') +def test_html_with_set_translator_for_latex(app, status, warning): + translator_class = app.builder.get_translator_class() + assert translator_class + assert translator_class.__name__ == 'ConfLaTeXTranslator' + + +@pytest.mark.sphinx('man', testroot='api-set-translator') +def test_html_with_set_translator_for_man(app, status, warning): + translator_class = app.builder.get_translator_class() + assert translator_class + assert translator_class.__name__ == 'ConfManualPageTranslator' + + +@pytest.mark.sphinx('texinfo', testroot='api-set-translator') +def test_html_with_set_translator_for_texinfo(app, status, warning): + translator_class = app.builder.get_translator_class() + assert translator_class + assert translator_class.__name__ == 'ConfTexinfoTranslator' + + +@pytest.mark.sphinx('text', testroot='api-set-translator') +def test_html_with_set_translator_for_text(app, status, warning): + translator_class = app.builder.get_translator_class() + assert translator_class + assert translator_class.__name__ == 'ConfTextTranslator' + + +@pytest.mark.sphinx('xml', testroot='api-set-translator') +def test_html_with_set_translator_for_xml(app, status, warning): + translator_class = app.builder.get_translator_class() + assert translator_class + assert translator_class.__name__ == 'ConfXMLTranslator' + + +@pytest.mark.sphinx('pseudoxml', testroot='api-set-translator') +def test_html_with_set_translator_for_pseudoxml(app, status, warning): + translator_class = app.builder.get_translator_class() + assert translator_class + assert translator_class.__name__ == 'ConfPseudoXMLTranslator' diff --git a/tests/test_application.py b/tests/test_application.py new file mode 100644 index 0000000..a0fe268 --- /dev/null +++ b/tests/test_application.py @@ -0,0 +1,152 @@ +"""Test the Sphinx class.""" + +import shutil +import sys +from io import StringIO +from pathlib import Path +from unittest.mock import Mock + +import 
pytest +from docutils import nodes + +import sphinx.application +from sphinx.errors import ExtensionError +from sphinx.testing.util import SphinxTestApp, strip_escseq +from sphinx.util import logging + + +def test_instantiation(tmp_path_factory, rootdir: str, monkeypatch): + # Given + src_dir = tmp_path_factory.getbasetemp() / 'root' + + # special support for sphinx/tests + if rootdir and not src_dir.exists(): + shutil.copytree(Path(str(rootdir)) / 'test-root', src_dir) + + syspath = sys.path[:] + + # When + app_ = SphinxTestApp( + srcdir=src_dir, + status=StringIO(), + warning=StringIO(), + ) + sys.path[:] = syspath + app_.cleanup() + + # Then + assert isinstance(app_, sphinx.application.Sphinx) + + +def test_events(app, status, warning): + def empty(): + pass + with pytest.raises(ExtensionError) as excinfo: + app.connect("invalid", empty) + assert "Unknown event name: invalid" in str(excinfo.value) + + app.add_event("my_event") + with pytest.raises(ExtensionError) as excinfo: + app.add_event("my_event") + assert "Event 'my_event' already present" in str(excinfo.value) + + def mock_callback(a_app, *args): + assert a_app is app + assert emit_args == args + return "ret" + emit_args = (1, 3, "string") + listener_id = app.connect("my_event", mock_callback) + assert app.emit("my_event", *emit_args) == ["ret"], "Callback not called" + + app.disconnect(listener_id) + assert app.emit("my_event", *emit_args) == [], \ + "Callback called when disconnected" + + +def test_emit_with_nonascii_name_node(app, status, warning): + node = nodes.section(names=['\u65e5\u672c\u8a9e']) + app.emit('my_event', node) + + +def test_extensions(app, status, warning): + app.setup_extension('shutil') + warning = strip_escseq(warning.getvalue()) + assert "extension 'shutil' has no setup() function" in warning + + +def test_extension_in_blacklist(app, status, warning): + app.setup_extension('sphinxjp.themecore') + msg = strip_escseq(warning.getvalue()) + assert msg.startswith("WARNING: the extension 'sphinxjp.themecore' was") + + +@pytest.mark.sphinx(testroot='add_source_parser') +def test_add_source_parser(app, status, warning): + assert set(app.config.source_suffix) == {'.rst', '.test'} + + # .rst; only in :confval:`source_suffix` + assert '.rst' not in app.registry.get_source_parsers() + assert app.registry.source_suffix['.rst'] is None + + # .test; configured by API + assert app.registry.source_suffix['.test'] == 'test' + assert 'test' in app.registry.get_source_parsers() + assert app.registry.get_source_parsers()['test'].__name__ == 'TestSourceParser' + + +@pytest.mark.sphinx(testroot='extensions') +def test_add_is_parallel_allowed(app, status, warning): + logging.setup(app, status, warning) + + assert app.is_parallel_allowed('read') is True + assert app.is_parallel_allowed('write') is True + assert warning.getvalue() == '' + + app.setup_extension('read_parallel') + assert app.is_parallel_allowed('read') is True + assert app.is_parallel_allowed('write') is True + assert warning.getvalue() == '' + app.extensions.pop('read_parallel') + + app.setup_extension('write_parallel') + assert app.is_parallel_allowed('read') is False + assert app.is_parallel_allowed('write') is True + assert ("the write_parallel extension does not declare if it is safe " + "for parallel reading, assuming it isn't - please ") in warning.getvalue() + app.extensions.pop('write_parallel') + warning.truncate(0) # reset warnings + + app.setup_extension('read_serial') + assert app.is_parallel_allowed('read') is False + assert "the read_serial extension 
is not safe for parallel reading" in warning.getvalue() + warning.truncate(0) # reset warnings + assert app.is_parallel_allowed('write') is True + assert warning.getvalue() == '' + app.extensions.pop('read_serial') + + app.setup_extension('write_serial') + assert app.is_parallel_allowed('read') is False + assert app.is_parallel_allowed('write') is False + assert ("the write_serial extension does not declare if it is safe " + "for parallel reading, assuming it isn't - please ") in warning.getvalue() + app.extensions.pop('write_serial') + warning.truncate(0) # reset warnings + + +@pytest.mark.sphinx('dummy', testroot='root') +def test_build_specific(app): + app.builder.build = Mock() + filenames = [app.srcdir / 'index.txt', # normal + app.srcdir / 'images', # without suffix + app.srcdir / 'notfound.txt', # not found + app.srcdir / 'img.png', # unknown suffix + '/index.txt', # external file + app.srcdir / 'subdir', # directory + app.srcdir / 'subdir/includes.txt', # file on subdir + app.srcdir / 'subdir/../subdir/excluded.txt'] # not normalized + app.build(False, filenames) + + expected = ['index', 'subdir/includes', 'subdir/excluded'] + app.builder.build.assert_called_with(expected, + method='specific', + summary='3 source files given on command line') diff --git a/tests/test_build.py b/tests/test_build.py new file mode 100644 index 0000000..ed4bc43 --- /dev/null +++ b/tests/test_build.py @@ -0,0 +1,135 @@ +"""Test all builders.""" + +import os +import shutil +from unittest import mock + +import pytest +from docutils import nodes + +from sphinx.errors import SphinxError + + +def request_session_head(url, **kwargs): + response = mock.Mock() + response.status_code = 200 + response.url = url + return response + + +@pytest.fixture() +def nonascii_srcdir(request, rootdir, sphinx_test_tempdir): + # Build in a non-ASCII source dir + test_name = '\u65e5\u672c\u8a9e' + basedir = sphinx_test_tempdir / request.node.originalname + srcdir = basedir / test_name + if not srcdir.exists(): + shutil.copytree(rootdir / 'test-root', srcdir) + + # add a doc with a non-ASCII file name to the source dir + (srcdir / (test_name + '.txt')).write_text(""" +nonascii file name page +======================= +""", encoding='utf8') + + root_doc = srcdir / 'index.txt' + root_doc.write_text(root_doc.read_text(encoding='utf8') + f""" +.. toctree:: + +{test_name}/{test_name} +""", encoding='utf8') + return srcdir + + +# note: this test skips building docs for some builders because they have independent testcase. 
+# (html, changes, epub, latex, texinfo and manpage) +@pytest.mark.parametrize( + "buildername", + ['dirhtml', 'singlehtml', 'text', 'xml', 'pseudoxml', 'linkcheck'], +) +@mock.patch('sphinx.builders.linkcheck.requests.head', + side_effect=request_session_head) +def test_build_all(requests_head, make_app, nonascii_srcdir, buildername): + app = make_app(buildername, srcdir=nonascii_srcdir) + app.build() + + +def test_root_doc_not_found(tmp_path, make_app): + (tmp_path / 'conf.py').write_text('', encoding='utf8') + assert os.listdir(tmp_path) == ['conf.py'] + + app = make_app('dummy', srcdir=tmp_path) + with pytest.raises(SphinxError): + app.builder.build_all() # no index.rst + + +@pytest.mark.sphinx(buildername='text', testroot='circular') +def test_circular_toctree(app, status, warning): + app.builder.build_all() + warnings = warning.getvalue() + assert ( + 'circular toctree references detected, ignoring: ' + 'sub <- index <- sub') in warnings + assert ( + 'circular toctree references detected, ignoring: ' + 'index <- sub <- index') in warnings + + +@pytest.mark.sphinx(buildername='text', testroot='numbered-circular') +def test_numbered_circular_toctree(app, status, warning): + app.builder.build_all() + warnings = warning.getvalue() + assert ( + 'circular toctree references detected, ignoring: ' + 'sub <- index <- sub') in warnings + assert ( + 'circular toctree references detected, ignoring: ' + 'index <- sub <- index') in warnings + + +@pytest.mark.sphinx(buildername='dummy', testroot='images') +def test_image_glob(app, status, warning): + app.builder.build_all() + + # index.rst + doctree = app.env.get_doctree('index') + + assert isinstance(doctree[0][1], nodes.image) + assert doctree[0][1]['candidates'] == {'*': 'rimg.png'} + assert doctree[0][1]['uri'] == 'rimg.png' + + assert isinstance(doctree[0][2], nodes.figure) + assert isinstance(doctree[0][2][0], nodes.image) + assert doctree[0][2][0]['candidates'] == {'*': 'rimg.png'} + assert doctree[0][2][0]['uri'] == 'rimg.png' + + assert isinstance(doctree[0][3], nodes.image) + assert doctree[0][3]['candidates'] == {'application/pdf': 'img.pdf', + 'image/gif': 'img.gif', + 'image/png': 'img.png'} + assert doctree[0][3]['uri'] == 'img.*' + + assert isinstance(doctree[0][4], nodes.figure) + assert isinstance(doctree[0][4][0], nodes.image) + assert doctree[0][4][0]['candidates'] == {'application/pdf': 'img.pdf', + 'image/gif': 'img.gif', + 'image/png': 'img.png'} + assert doctree[0][4][0]['uri'] == 'img.*' + + # subdir/index.rst + doctree = app.env.get_doctree('subdir/index') + + assert isinstance(doctree[0][1], nodes.image) + assert doctree[0][1]['candidates'] == {'*': 'subdir/rimg.png'} + assert doctree[0][1]['uri'] == 'subdir/rimg.png' + + assert isinstance(doctree[0][2], nodes.image) + assert doctree[0][2]['candidates'] == {'application/pdf': 'subdir/svgimg.pdf', + 'image/svg+xml': 'subdir/svgimg.svg'} + assert doctree[0][2]['uri'] == 'subdir/svgimg.*' + + assert isinstance(doctree[0][3], nodes.figure) + assert isinstance(doctree[0][3][0], nodes.image) + assert doctree[0][3][0]['candidates'] == {'application/pdf': 'subdir/svgimg.pdf', + 'image/svg+xml': 'subdir/svgimg.svg'} + assert doctree[0][3][0]['uri'] == 'subdir/svgimg.*' diff --git a/tests/test_build_changes.py b/tests/test_build_changes.py new file mode 100644 index 0000000..b340c8d --- /dev/null +++ b/tests/test_build_changes.py @@ -0,0 +1,34 @@ +"""Test the ChangesBuilder class.""" + +import pytest + + +@pytest.mark.sphinx('changes', testroot='changes') +def test_build(app): + 
app.build() + + # TODO: Use better checking of html content + htmltext = (app.outdir / 'changes.html').read_text(encoding='utf8') + assert 'New in version 0.6: Some funny stuff.' in htmltext + assert 'Changed in version 0.6: Even more funny stuff.' in htmltext + assert 'Deprecated since version 0.6: Boring stuff.' in htmltext + + path_html = ( + '<b>Path</b>: <i>deprecated:</i> Deprecated since version 0.6:' + ' So, that was a bad idea it turns out.') + assert path_html in htmltext + + malloc_html = ( + '<b>void *Test_Malloc(size_t n)</b>: <i>changed:</i> Changed in version 0.6:' + ' Can now be replaced with a different allocator.</a>') + assert malloc_html in htmltext + + +@pytest.mark.sphinx( + 'changes', testroot='changes', srcdir='changes-none', + confoverrides={'version': '0.7', 'release': '0.7b1'}) +def test_no_changes(app, status): + app.build() + + assert 'no changes in version 0.7.' in status.getvalue() + assert not (app.outdir / 'changes.html').exists() diff --git a/tests/test_build_dirhtml.py b/tests/test_build_dirhtml.py new file mode 100644 index 0000000..dc5ab86 --- /dev/null +++ b/tests/test_build_dirhtml.py @@ -0,0 +1,40 @@ +"""Test dirhtml builder.""" + +import posixpath + +import pytest + +from sphinx.util.inventory import InventoryFile + + +@pytest.mark.sphinx(buildername='dirhtml', testroot='builder-dirhtml') +def test_dirhtml(app, status, warning): + app.build() + + assert (app.outdir / 'index.html').exists() + assert (app.outdir / 'foo/index.html').exists() + assert (app.outdir / 'foo/foo_1/index.html').exists() + assert (app.outdir / 'foo/foo_2/index.html').exists() + assert (app.outdir / 'bar/index.html').exists() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert 'href="foo/"' in content + assert 'href="foo/foo_1/"' in content + assert 'href="foo/foo_2/"' in content + assert 'href="bar/"' in content + + # objects.inv (refs: #7095) + with (app.outdir / 'objects.inv').open('rb') as f: + invdata = InventoryFile.load(f, 'path/to', posixpath.join) + + assert 'index' in invdata.get('std:doc') + assert invdata['std:doc']['index'] == ('Python', '', 'path/to/', '-') + + assert 'foo/index' in invdata.get('std:doc') + assert invdata['std:doc']['foo/index'] == ('Python', '', 'path/to/foo/', '-') + + assert 'index' in invdata.get('std:label') + assert invdata['std:label']['index'] == ('Python', '', 'path/to/#index', '-') + + assert 'foo' in invdata.get('std:label') + assert invdata['std:label']['foo'] == ('Python', '', 'path/to/foo/#foo', 'foo/index') diff --git a/tests/test_build_epub.py b/tests/test_build_epub.py new file mode 100644 index 0000000..7f5b815 --- /dev/null +++ b/tests/test_build_epub.py @@ -0,0 +1,411 @@ +"""Test the HTML builder and check output against XPath.""" + +import os +import subprocess +from pathlib import Path +from subprocess import CalledProcessError +from xml.etree import ElementTree + +import pytest + +from sphinx.builders.epub3 import _XML_NAME_PATTERN + + +# check given command is runnable +def runnable(command): + try: + subprocess.run(command, capture_output=True, check=True) + return True + except (OSError, CalledProcessError): + return False # command not found or exit with non-zero + + +class EPUBElementTree: + """Test helper for content.opf and toc.ncx""" + namespaces = { + 'idpf': 'http://www.idpf.org/2007/opf', + 'dc': 'http://purl.org/dc/elements/1.1/', + 'ibooks': 'http://vocabulary.itunes.apple.com/rdf/ibooks/vocabulary-extensions-1.0/', + 'ncx': 'http://www.daisy.org/z3986/2005/ncx/', + 'xhtml': 
'http://www.w3.org/1999/xhtml', + 'epub': 'http://www.idpf.org/2007/ops', + } + + def __init__(self, tree): + self.tree = tree + + @classmethod + def fromstring(cls, string): + tree = ElementTree.fromstring(string) # NoQA: S314 # using known data in tests + return cls(tree) + + def find(self, match): + ret = self.tree.find(match, namespaces=self.namespaces) + if ret is not None: + return self.__class__(ret) + else: + return ret + + def findall(self, match): + ret = self.tree.findall(match, namespaces=self.namespaces) + return [self.__class__(e) for e in ret] + + def __getattr__(self, name): + return getattr(self.tree, name) + + def __iter__(self): + for child in self.tree: + yield self.__class__(child) + + +@pytest.mark.sphinx('epub', testroot='basic') +def test_build_epub(app): + app.builder.build_all() + assert (app.outdir / 'mimetype').read_text(encoding='utf8') == 'application/epub+zip' + assert (app.outdir / 'META-INF' / 'container.xml').exists() + + # toc.ncx + toc = EPUBElementTree.fromstring((app.outdir / 'toc.ncx').read_text(encoding='utf8')) + assert toc.find("./ncx:docTitle/ncx:text").text == 'Python' + + # toc.ncx / head + meta = list(toc.find("./ncx:head")) + assert meta[0].attrib == {'name': 'dtb:uid', 'content': 'unknown'} + assert meta[1].attrib == {'name': 'dtb:depth', 'content': '1'} + assert meta[2].attrib == {'name': 'dtb:totalPageCount', 'content': '0'} + assert meta[3].attrib == {'name': 'dtb:maxPageNumber', 'content': '0'} + + # toc.ncx / navMap + navpoints = toc.findall("./ncx:navMap/ncx:navPoint") + assert len(navpoints) == 1 + assert navpoints[0].attrib == {'id': 'navPoint1', 'playOrder': '1'} + assert navpoints[0].find("./ncx:content").attrib == {'src': 'index.xhtml'} + + navlabel = navpoints[0].find("./ncx:navLabel/ncx:text") + assert navlabel.text == 'The basic Sphinx documentation for testing' + + # content.opf + opf = EPUBElementTree.fromstring((app.outdir / 'content.opf').read_text(encoding='utf8')) + + # content.opf / metadata + metadata = opf.find("./idpf:metadata") + assert metadata.find("./dc:language").text == 'en' + assert metadata.find("./dc:title").text == 'Python' + assert metadata.find("./dc:description").text == 'unknown' + assert metadata.find("./dc:creator").text == 'unknown' + assert metadata.find("./dc:contributor").text == 'unknown' + assert metadata.find("./dc:publisher").text == 'unknown' + assert metadata.find("./dc:rights").text is None + assert metadata.find("./idpf:meta[@property='ibooks:version']").text is None + assert metadata.find("./idpf:meta[@property='ibooks:specified-fonts']").text == 'true' + assert metadata.find("./idpf:meta[@property='ibooks:binding']").text == 'true' + assert metadata.find("./idpf:meta[@property='ibooks:scroll-axis']").text == 'vertical' + + # content.opf / manifest + manifest = opf.find("./idpf:manifest") + items = list(manifest) + assert items[0].attrib == {'id': 'ncx', + 'href': 'toc.ncx', + 'media-type': 'application/x-dtbncx+xml'} + assert items[1].attrib == {'id': 'nav', + 'href': 'nav.xhtml', + 'media-type': 'application/xhtml+xml', + 'properties': 'nav'} + assert items[2].attrib == {'id': 'epub-0', + 'href': 'genindex.xhtml', + 'media-type': 'application/xhtml+xml'} + assert items[3].attrib == {'id': 'epub-1', + 'href': 'index.xhtml', + 'media-type': 'application/xhtml+xml'} + + for i, item in enumerate(items[2:]): + # items are named as epub-NN + assert item.get('id') == 'epub-%d' % i + + # content.opf / spine + spine = opf.find("./idpf:spine") + itemrefs = list(spine) + assert spine.get('toc') == 
'ncx' + assert spine.get('page-progression-direction') == 'ltr' + assert itemrefs[0].get('idref') == 'epub-1' + assert itemrefs[1].get('idref') == 'epub-0' + + # content.opf / guide + reference = opf.find("./idpf:guide/idpf:reference") + assert reference.get('type') == 'toc' + assert reference.get('title') == 'Table of Contents' + assert reference.get('href') == 'index.xhtml' + + # nav.xhtml + nav = EPUBElementTree.fromstring((app.outdir / 'nav.xhtml').read_text(encoding='utf8')) + assert nav.attrib == {'lang': 'en', + '{http://www.w3.org/XML/1998/namespace}lang': 'en'} + assert nav.find("./xhtml:head/xhtml:title").text == 'Table of Contents' + + # nav.xhtml / nav + navlist = nav.find("./xhtml:body/xhtml:nav") + toc = navlist.findall("./xhtml:ol/xhtml:li") + assert navlist.find("./xhtml:h1").text == 'Table of Contents' + assert len(toc) == 1 + assert toc[0].find("./xhtml:a").get("href") == 'index.xhtml' + assert toc[0].find("./xhtml:a").text == 'The basic Sphinx documentation for testing' + + +@pytest.mark.sphinx('epub', testroot='footnotes', + confoverrides={'epub_cover': ('_images/rimg.png', None)}) +def test_epub_cover(app): + app.build() + + # content.opf / metadata + opf = EPUBElementTree.fromstring((app.outdir / 'content.opf').read_text(encoding='utf8')) + cover_image = opf.find("./idpf:manifest/idpf:item[@href='%s']" % app.config.epub_cover[0]) + cover = opf.find("./idpf:metadata/idpf:meta[@name='cover']") + assert cover + assert cover.get('content') == cover_image.get('id') + + +@pytest.mark.sphinx('epub', testroot='toctree') +def test_nested_toc(app): + app.build() + + # toc.ncx + toc = EPUBElementTree.fromstring((app.outdir / 'toc.ncx').read_bytes()) + assert toc.find("./ncx:docTitle/ncx:text").text == 'Python' + + # toc.ncx / navPoint + def navinfo(elem): + label = elem.find("./ncx:navLabel/ncx:text") + content = elem.find("./ncx:content") + return (elem.get('id'), elem.get('playOrder'), + content.get('src'), label.text) + + navpoints = toc.findall("./ncx:navMap/ncx:navPoint") + assert len(navpoints) == 4 + assert navinfo(navpoints[0]) == ('navPoint1', '1', 'index.xhtml', + "Welcome to Sphinx Tests’s documentation!") + assert navpoints[0].findall("./ncx:navPoint") == [] + + # toc.ncx / nested navPoints + assert navinfo(navpoints[1]) == ('navPoint2', '2', 'foo.xhtml', 'foo') + navchildren = navpoints[1].findall("./ncx:navPoint") + assert len(navchildren) == 4 + assert navinfo(navchildren[0]) == ('navPoint3', '2', 'foo.xhtml', 'foo') + assert navinfo(navchildren[1]) == ('navPoint4', '3', 'quux.xhtml', 'quux') + assert navinfo(navchildren[2]) == ('navPoint5', '4', 'foo.xhtml#foo-1', 'foo.1') + assert navinfo(navchildren[3]) == ('navPoint8', '6', 'foo.xhtml#foo-2', 'foo.2') + + # nav.xhtml / nav + def navinfo(elem): + anchor = elem.find("./xhtml:a") + return (anchor.get('href'), anchor.text) + + nav = EPUBElementTree.fromstring((app.outdir / 'nav.xhtml').read_bytes()) + toc = nav.findall("./xhtml:body/xhtml:nav/xhtml:ol/xhtml:li") + assert len(toc) == 4 + assert navinfo(toc[0]) == ('index.xhtml', + "Welcome to Sphinx Tests’s documentation!") + assert toc[0].findall("./xhtml:ol") == [] + + # nav.xhtml / nested toc + assert navinfo(toc[1]) == ('foo.xhtml', 'foo') + tocchildren = toc[1].findall("./xhtml:ol/xhtml:li") + assert len(tocchildren) == 3 + assert navinfo(tocchildren[0]) == ('quux.xhtml', 'quux') + assert navinfo(tocchildren[1]) == ('foo.xhtml#foo-1', 'foo.1') + assert navinfo(tocchildren[2]) == ('foo.xhtml#foo-2', 'foo.2') + + grandchild = 
tocchildren[1].findall("./xhtml:ol/xhtml:li") + assert len(grandchild) == 1 + assert navinfo(grandchild[0]) == ('foo.xhtml#foo-1-1', 'foo.1-1') + + +@pytest.mark.sphinx('epub', testroot='need-escaped') +def test_escaped_toc(app): + app.build() + + # toc.ncx + toc = EPUBElementTree.fromstring((app.outdir / 'toc.ncx').read_bytes()) + assert toc.find("./ncx:docTitle/ncx:text").text == 'need <b>"escaped"</b> project' + + # toc.ncx / navPoint + def navinfo(elem): + label = elem.find("./ncx:navLabel/ncx:text") + content = elem.find("./ncx:content") + return (elem.get('id'), elem.get('playOrder'), + content.get('src'), label.text) + + navpoints = toc.findall("./ncx:navMap/ncx:navPoint") + assert len(navpoints) == 4 + assert navinfo(navpoints[0]) == ('navPoint1', '1', 'index.xhtml', + "Welcome to Sphinx Tests's documentation!") + assert navpoints[0].findall("./ncx:navPoint") == [] + + # toc.ncx / nested navPoints + assert navinfo(navpoints[1]) == ('navPoint2', '2', 'foo.xhtml', '<foo>') + navchildren = navpoints[1].findall("./ncx:navPoint") + assert len(navchildren) == 4 + assert navinfo(navchildren[0]) == ('navPoint3', '2', 'foo.xhtml', '<foo>') + assert navinfo(navchildren[1]) == ('navPoint4', '3', 'quux.xhtml', 'quux') + assert navinfo(navchildren[2]) == ('navPoint5', '4', 'foo.xhtml#foo-1', 'foo “1”') + assert navinfo(navchildren[3]) == ('navPoint8', '6', 'foo.xhtml#foo-2', 'foo.2') + + # nav.xhtml / nav + def navinfo(elem): + anchor = elem.find("./xhtml:a") + return (anchor.get('href'), anchor.text) + + nav = EPUBElementTree.fromstring((app.outdir / 'nav.xhtml').read_bytes()) + toc = nav.findall("./xhtml:body/xhtml:nav/xhtml:ol/xhtml:li") + assert len(toc) == 4 + assert navinfo(toc[0]) == ('index.xhtml', + "Welcome to Sphinx Tests's documentation!") + assert toc[0].findall("./xhtml:ol") == [] + + # nav.xhtml / nested toc + assert navinfo(toc[1]) == ('foo.xhtml', '<foo>') + tocchildren = toc[1].findall("./xhtml:ol/xhtml:li") + assert len(tocchildren) == 3 + assert navinfo(tocchildren[0]) == ('quux.xhtml', 'quux') + assert navinfo(tocchildren[1]) == ('foo.xhtml#foo-1', 'foo “1”') + assert navinfo(tocchildren[2]) == ('foo.xhtml#foo-2', 'foo.2') + + grandchild = tocchildren[1].findall("./xhtml:ol/xhtml:li") + assert len(grandchild) == 1 + assert navinfo(grandchild[0]) == ('foo.xhtml#foo-1-1', 'foo.1-1') + + +@pytest.mark.sphinx('epub', testroot='basic') +def test_epub_writing_mode(app): + # horizontal (default) + app.builder.build_all() + + # horizontal / page-progression-direction + opf = EPUBElementTree.fromstring((app.outdir / 'content.opf').read_text(encoding='utf8')) + assert opf.find("./idpf:spine").get('page-progression-direction') == 'ltr' + + # horizontal / ibooks:scroll-axis + metadata = opf.find("./idpf:metadata") + assert metadata.find("./idpf:meta[@property='ibooks:scroll-axis']").text == 'vertical' + + # horizontal / writing-mode (CSS) + css = (app.outdir / '_static' / 'epub.css').read_text(encoding='utf8') + assert 'writing-mode: horizontal-tb;' in css + + # vertical + app.config.epub_writing_mode = 'vertical' + (app.outdir / 'index.xhtml').unlink() # forcely rebuild + app.build() + + # vertical / page-progression-direction + opf = EPUBElementTree.fromstring((app.outdir / 'content.opf').read_text(encoding='utf8')) + assert opf.find("./idpf:spine").get('page-progression-direction') == 'rtl' + + # vertical / ibooks:scroll-axis + metadata = opf.find("./idpf:metadata") + assert metadata.find("./idpf:meta[@property='ibooks:scroll-axis']").text == 'horizontal' + + # vertical / 
writing-mode (CSS) + css = (app.outdir / '_static' / 'epub.css').read_text(encoding='utf8') + assert 'writing-mode: vertical-rl;' in css + + +@pytest.mark.sphinx('epub', testroot='epub-anchor-id') +def test_epub_anchor_id(app): + app.build() + + html = (app.outdir / 'index.xhtml').read_text(encoding='utf8') + assert ('<p id="std-setting-STATICFILES_FINDERS">' + 'blah blah blah</p>' in html) + assert ('<span id="std-setting-STATICFILES_SECTION"></span>' + '<h1>blah blah blah</h1>' in html) + assert 'see <a class="reference internal" href="#std-setting-STATICFILES_FINDERS">' in html + + +@pytest.mark.sphinx('epub', testroot='html_assets') +def test_epub_assets(app): + app.builder.build_all() + + # epub_sytlesheets (same as html_css_files) + content = (app.outdir / 'index.xhtml').read_text(encoding='utf8') + assert ('<link rel="stylesheet" type="text/css" href="_static/css/style.css" />' + in content) + assert ('<link media="print" rel="stylesheet" title="title" type="text/css" ' + 'href="https://example.com/custom.css" />' in content) + + +@pytest.mark.sphinx('epub', testroot='html_assets', + confoverrides={'epub_css_files': ['css/epub.css']}) +def test_epub_css_files(app): + app.builder.build_all() + + # epub_css_files + content = (app.outdir / 'index.xhtml').read_text(encoding='utf8') + assert '<link rel="stylesheet" type="text/css" href="_static/css/epub.css" />' in content + + # files in html_css_files are not outputted + assert ('<link rel="stylesheet" type="text/css" href="_static/css/style.css" />' + not in content) + assert ('<link media="print" rel="stylesheet" title="title" type="text/css" ' + 'href="https://example.com/custom.css" />' not in content) + + +@pytest.mark.sphinx('epub', testroot='roles-download') +def test_html_download_role(app, status, warning): + app.build() + assert not (app.outdir / '_downloads' / 'dummy.dat').exists() + + content = (app.outdir / 'index.xhtml').read_text(encoding='utf8') + assert ('<li><p><code class="xref download docutils literal notranslate">' + '<span class="pre">dummy.dat</span></code></p></li>' in content) + assert ('<li><p><code class="xref download docutils literal notranslate">' + '<span class="pre">not_found.dat</span></code></p></li>' in content) + assert ('<li><p><code class="xref download docutils literal notranslate">' + '<span class="pre">Sphinx</span> <span class="pre">logo</span></code>' + '<span class="link-target"> [http://www.sphinx-doc.org/en/master' + '/_static/sphinxheader.png]</span></p></li>' in content) + + +@pytest.mark.sphinx('epub', testroot='toctree-duplicated') +def test_duplicated_toctree_entry(app, status, warning): + app.builder.build_all() + assert 'WARNING: duplicated ToC entry found: foo.xhtml' in warning.getvalue() + + +@pytest.mark.skipif('DO_EPUBCHECK' not in os.environ, + reason='Skipped because DO_EPUBCHECK is not set') +@pytest.mark.sphinx('epub') +def test_run_epubcheck(app): + app.build() + + epubcheck = os.environ.get('EPUBCHECK_PATH', '/usr/share/java/epubcheck.jar') + if runnable(['java', '-version']) and os.path.exists(epubcheck): + try: + subprocess.run(['java', '-jar', epubcheck, app.outdir / 'SphinxTests.epub'], + capture_output=True, check=True) + except CalledProcessError as exc: + print(exc.stdout.decode('utf-8')) + print(exc.stderr.decode('utf-8')) + msg = f'epubcheck exited with return code {exc.returncode}' + raise AssertionError(msg) from exc + + +def test_xml_name_pattern_check(): + assert _XML_NAME_PATTERN.match('id-pub') + assert _XML_NAME_PATTERN.match('webpage') + assert not 
_XML_NAME_PATTERN.match('1bfda21') + + +@pytest.mark.sphinx('epub', testroot='images') +def test_copy_images(app, status, warning): + app.build() + + images_dir = Path(app.outdir) / '_images' + images = {image.name for image in images_dir.rglob('*')} + images.discard('python-logo.png') + assert images == { + 'img.png', + 'rimg.png', + 'rimg1.png', + 'svgimg.svg', + 'testimäge.png', + } diff --git a/tests/test_build_gettext.py b/tests/test_build_gettext.py new file mode 100644 index 0000000..6d9154e --- /dev/null +++ b/tests/test_build_gettext.py @@ -0,0 +1,235 @@ +"""Test the build process with gettext builder with the test root.""" + +import gettext +import os +import re +import subprocess +import sys +from subprocess import CalledProcessError + +import pytest + +from sphinx.builders.gettext import Catalog, MsgOrigin + +if sys.version_info[:2] >= (3, 11): + from contextlib import chdir +else: + from sphinx.util.osutil import _chdir as chdir + +_MSGID_PATTERN = re.compile(r'msgid "(.*)"') + + +def msgid_getter(msgid): + if m := _MSGID_PATTERN.search(msgid): + return m[1] + return None + + +def test_Catalog_duplicated_message(): + catalog = Catalog() + catalog.add('hello', MsgOrigin('/path/to/filename', 1)) + catalog.add('hello', MsgOrigin('/path/to/filename', 1)) + catalog.add('hello', MsgOrigin('/path/to/filename', 2)) + catalog.add('hello', MsgOrigin('/path/to/yetanother', 1)) + catalog.add('world', MsgOrigin('/path/to/filename', 1)) + + assert len(list(catalog)) == 2 + + msg1, msg2 = list(catalog) + assert msg1.text == 'hello' + assert msg1.locations == [('/path/to/filename', 1), + ('/path/to/filename', 2), + ('/path/to/yetanother', 1)] + assert msg2.text == 'world' + assert msg2.locations == [('/path/to/filename', 1)] + + +@pytest.mark.sphinx('gettext', srcdir='root-gettext') +def test_build_gettext(app): + # Generic build; should fail only when the builder is horribly broken. + app.builder.build_all() + + # Do messages end up in the correct location? 
+ # top-level documents end up in a message catalog + assert (app.outdir / 'extapi.pot').is_file() + # directory items are grouped into sections + assert (app.outdir / 'subdir.pot').is_file() + + # regression test for issue #960 + catalog = (app.outdir / 'markup.pot').read_text(encoding='utf8') + assert 'msgid "something, something else, something more"' in catalog + + +@pytest.mark.sphinx('gettext', srcdir='root-gettext') +def test_msgfmt(app): + app.builder.build_all() + + (app.outdir / 'en' / 'LC_MESSAGES').mkdir(parents=True, exist_ok=True) + with chdir(app.outdir): + try: + args = ['msginit', '--no-translator', '-i', 'markup.pot', '--locale', 'en_US'] + subprocess.run(args, capture_output=True, check=True) + except OSError: + pytest.skip() # most likely msginit was not found + except CalledProcessError as exc: + print(exc.stdout) + print(exc.stderr) + msg = f'msginit exited with return code {exc.returncode}' + raise AssertionError(msg) from exc + + assert (app.outdir / 'en_US.po').is_file(), 'msginit failed' + try: + args = ['msgfmt', 'en_US.po', + '-o', os.path.join('en', 'LC_MESSAGES', 'test_root.mo')] + subprocess.run(args, capture_output=True, check=True) + except OSError: + pytest.skip() # most likely msgfmt was not found + except CalledProcessError as exc: + print(exc.stdout) + print(exc.stderr) + msg = f'msgfmt exited with return code {exc.returncode}' + raise AssertionError(msg) from exc + + mo = app.outdir / 'en' / 'LC_MESSAGES' / 'test_root.mo' + assert mo.is_file(), 'msgfmt failed' + + _ = gettext.translation('test_root', app.outdir, languages=['en']).gettext + assert _("Testing various markup") == "Testing various markup" + + +@pytest.mark.sphinx( + 'gettext', testroot='intl', srcdir='gettext', + confoverrides={'gettext_compact': False}) +def test_gettext_index_entries(app): + # regression test for #976 + app.builder.build(['index_entries']) + + pot = (app.outdir / 'index_entries.pot').read_text(encoding='utf8') + msg_ids = list(filter(None, map(msgid_getter, pot.splitlines()))) + + assert msg_ids == [ + "i18n with index entries", + "index target section", + "this is :index:`Newsletter` target paragraph.", + "various index entries", + "That's all.", + "Mailing List", + "Newsletter", + "Recipients List", + "First", + "Second", + "Third", + "Entry", + "See", + ] + + +@pytest.mark.sphinx( + 'gettext', testroot='intl', srcdir='gettext', + confoverrides={'gettext_compact': False, + 'gettext_additional_targets': []}) +def test_gettext_disable_index_entries(app): + # regression test for #976 + app.env._pickled_doctree_cache.clear() # clear cache + app.builder.build(['index_entries']) + + pot = (app.outdir / 'index_entries.pot').read_text(encoding='utf8') + msg_ids = list(filter(None, map(msgid_getter, pot.splitlines()))) + + assert msg_ids == [ + "i18n with index entries", + "index target section", + "this is :index:`Newsletter` target paragraph.", + "various index entries", + "That's all.", + ] + + +@pytest.mark.sphinx('gettext', testroot='intl', srcdir='gettext') +def test_gettext_template(app): + app.builder.build_all() + + assert (app.outdir / 'sphinx.pot').is_file() + + result = (app.outdir / 'sphinx.pot').read_text(encoding='utf8') + assert "Welcome" in result + assert "Sphinx %(version)s" in result + + +@pytest.mark.sphinx('gettext', testroot='gettext-template') +def test_gettext_template_msgid_order_in_sphinxpot(app): + app.builder.build_all() + assert (app.outdir / 'sphinx.pot').is_file() + + result = (app.outdir / 'sphinx.pot').read_text(encoding='utf8') + assert 
re.search( + ('msgid "Template 1".*' + 'msgid "This is Template 1\\.".*' + 'msgid "Template 2".*' + 'msgid "This is Template 2\\.".*'), + result, + flags=re.DOTALL) + + +@pytest.mark.sphinx( + 'gettext', srcdir='root-gettext', + confoverrides={'gettext_compact': 'documentation'}) +def test_build_single_pot(app): + app.builder.build_all() + + assert (app.outdir / 'documentation.pot').is_file() + + result = (app.outdir / 'documentation.pot').read_text(encoding='utf8') + assert re.search( + ('msgid "Todo".*' + 'msgid "Like footnotes.".*' + 'msgid "The minute.".*' + 'msgid "Generated section".*'), + result, + flags=re.DOTALL) + + +@pytest.mark.sphinx( + 'gettext', + testroot='intl_substitution_definitions', + srcdir='gettext-subst', + confoverrides={'gettext_compact': False, + 'gettext_additional_targets': ['image']}) +def test_gettext_prolog_epilog_substitution(app): + app.builder.build_all() + + assert (app.outdir / 'prolog_epilog_substitution.pot').is_file() + pot = (app.outdir / 'prolog_epilog_substitution.pot').read_text(encoding='utf8') + msg_ids = list(filter(None, map(msgid_getter, pot.splitlines()))) + + assert msg_ids == [ + "i18n with prologue and epilogue substitutions", + "This is content that contains |subst_prolog_1|.", + "Substituted image |subst_prolog_2| here.", + "subst_prolog_2", + ".. image:: /img.png", + "This is content that contains |subst_epilog_1|.", + "Substituted image |subst_epilog_2| here.", + "subst_epilog_2", + ".. image:: /i18n.png", + ] + + +@pytest.mark.sphinx( + 'gettext', + testroot='intl_substitution_definitions', + srcdir='gettext-subst', + confoverrides={'gettext_compact': False, + 'gettext_additional_targets': ['image']}) +def test_gettext_prolog_epilog_substitution_excluded(app): + # regression test for #9428 + app.builder.build_all() + + assert (app.outdir / 'prolog_epilog_substitution_excluded.pot').is_file() + pot = (app.outdir / 'prolog_epilog_substitution_excluded.pot').read_text(encoding='utf8') + msg_ids = list(filter(None, map(msgid_getter, pot.splitlines()))) + + assert msg_ids == [ + "i18n without prologue and epilogue substitutions", + "This is content that does not include prologue and epilogue substitutions.", + ] diff --git a/tests/test_build_html.py b/tests/test_build_html.py new file mode 100644 index 0000000..07f101d --- /dev/null +++ b/tests/test_build_html.py @@ -0,0 +1,1841 @@ +"""Test the HTML builder and check output against XPath.""" + +import hashlib +import os +import posixpath +import re +from itertools import chain, cycle +from pathlib import Path +from unittest.mock import ANY, call, patch + +import pytest +from html5lib import HTMLParser + +import sphinx.builders.html +from sphinx.builders.html import validate_html_extra_path, validate_html_static_path +from sphinx.builders.html._assets import _file_checksum +from sphinx.errors import ConfigError, ThemeError +from sphinx.testing.util import strip_escseq +from sphinx.util.inventory import InventoryFile + +FIGURE_CAPTION = ".//figure/figcaption/p" + + +ENV_WARNINGS = """\ +%(root)s/autodoc_fodder.py:docstring of autodoc_fodder.MarkupError:\\d+: \ +WARNING: Explicit markup ends without a blank line; unexpected unindent. 
+%(root)s/index.rst:\\d+: WARNING: Encoding 'utf-8-sig' used for reading included \ +file '%(root)s/wrongenc.inc' seems to be wrong, try giving an :encoding: option +%(root)s/index.rst:\\d+: WARNING: invalid single index entry '' +%(root)s/index.rst:\\d+: WARNING: image file not readable: foo.png +%(root)s/index.rst:\\d+: WARNING: download file not readable: %(root)s/nonexisting.png +%(root)s/undecodable.rst:\\d+: WARNING: undecodable source characters, replacing \ +with "\\?": b?'here: >>>(\\\\|/)xbb<<<((\\\\|/)r)?' +""" + +HTML_WARNINGS = ENV_WARNINGS + """\ +%(root)s/index.rst:\\d+: WARNING: unknown option: '&option' +%(root)s/index.rst:\\d+: WARNING: citation not found: missing +%(root)s/index.rst:\\d+: WARNING: a suitable image for html builder not found: foo.\\* +%(root)s/index.rst:\\d+: WARNING: Lexing literal_block ".*" as "c" resulted in an error at token: ".*". Retrying in relaxed mode. +""" + + +etree_cache = {} + + +@pytest.fixture(scope='module') +def cached_etree_parse(): + def parse(fname): + if fname in etree_cache: + return etree_cache[fname] + with (fname).open('rb') as fp: + etree = HTMLParser(namespaceHTMLElements=False).parse(fp) + etree_cache.clear() + etree_cache[fname] = etree + return etree + yield parse + etree_cache.clear() + + +def flat_dict(d): + return chain.from_iterable( + [ + zip(cycle([fname]), values) + for fname, values in d.items() + ], + ) + + +def tail_check(check): + rex = re.compile(check) + + def checker(nodes): + for node in nodes: + if node.tail and rex.search(node.tail): + return True + msg = f'{check!r} not found in tail of any nodes {nodes}' + raise AssertionError(msg) + return checker + + +def check_xpath(etree, fname, path, check, be_found=True): + nodes = list(etree.findall(path)) + if check is None: + assert nodes == [], ('found any nodes matching xpath ' + '%r in file %s' % (path, fname)) + return + else: + assert nodes != [], ('did not find any node matching xpath ' + '%r in file %s' % (path, fname)) + if callable(check): + check(nodes) + elif not check: + # only check for node presence + pass + else: + def get_text(node): + if node.text is not None: + # the node has only one text + return node.text + else: + # the node has tags and text; gather texts just under the node + return ''.join(n.tail or '' for n in node) + + rex = re.compile(check) + if be_found: + if any(rex.search(get_text(node)) for node in nodes): + return + else: + if all(not rex.search(get_text(node)) for node in nodes): + return + + raise AssertionError('%r not found in any node matching ' + 'path %s in %s: %r' % (check, path, fname, + [node.text for node in nodes])) + + +@pytest.mark.sphinx('html', testroot='warnings') +def test_html_warnings(app, warning): + app.build() + html_warnings = strip_escseq(re.sub(re.escape(os.sep) + '{1,2}', '/', warning.getvalue())) + html_warnings_exp = HTML_WARNINGS % { + 'root': re.escape(app.srcdir.as_posix())} + assert re.match(html_warnings_exp + '$', html_warnings), \ + "Warnings don't match:\n" + \ + '--- Expected (regex):\n' + html_warnings_exp + \ + '--- Got:\n' + html_warnings + + +def test_html4_error(make_app, tmp_path): + (tmp_path / 'conf.py').write_text('', encoding='utf-8') + with pytest.raises( + ConfigError, + match='HTML 4 is no longer supported by Sphinx', + ): + make_app( + buildername='html', + srcdir=tmp_path, + confoverrides={'html4_writer': True}, + ) + + +@pytest.mark.parametrize(("fname", "expect"), flat_dict({ + 'images.html': [ + (".//img[@src='_images/img.png']", ''), + (".//img[@src='_images/img1.png']", 
''), + (".//img[@src='_images/simg.png']", ''), + (".//img[@src='_images/svgimg.svg']", ''), + (".//a[@href='_sources/images.txt']", ''), + ], + 'subdir/images.html': [ + (".//img[@src='../_images/img1.png']", ''), + (".//img[@src='../_images/rimg.png']", ''), + ], + 'subdir/includes.html': [ + (".//a[@class='reference download internal']", ''), + (".//img[@src='../_images/img.png']", ''), + (".//p", 'This is an include file.'), + (".//pre/span", 'line 1'), + (".//pre/span", 'line 2'), + ], + 'includes.html': [ + (".//pre", 'Max Strauß'), + (".//a[@class='reference download internal']", ''), + (".//pre/span", '"quotes"'), + (".//pre/span", "'included'"), + (".//pre/span[@class='s2']", 'üöä'), + (".//div[@class='inc-pyobj1 highlight-text notranslate']//pre", + r'^class Foo:\n pass\n\s*$'), + (".//div[@class='inc-pyobj2 highlight-text notranslate']//pre", + r'^ def baz\(\):\n pass\n\s*$'), + (".//div[@class='inc-lines highlight-text notranslate']//pre", + r'^class Foo:\n pass\nclass Bar:\n$'), + (".//div[@class='inc-startend highlight-text notranslate']//pre", + '^foo = "Including Unicode characters: üöä"\\n$'), + (".//div[@class='inc-preappend highlight-text notranslate']//pre", + r'(?m)^START CODE$'), + (".//div[@class='inc-pyobj-dedent highlight-python notranslate']//span", + r'def'), + (".//div[@class='inc-tab3 highlight-text notranslate']//pre", + r'-| |-'), + (".//div[@class='inc-tab8 highlight-python notranslate']//pre/span", + r'-| |-'), + ], + 'autodoc.html': [ + (".//dl[@class='py class']/dt[@id='autodoc_target.Class']", ''), + (".//dl[@class='py function']/dt[@id='autodoc_target.function']/em/span/span", r'\*\*'), + (".//dl[@class='py function']/dt[@id='autodoc_target.function']/em/span/span", r'kwds'), + (".//dd/p", r'Return spam\.'), + ], + 'extapi.html': [ + (".//strong", 'from class: Bar'), + ], + 'markup.html': [ + (".//title", 'set by title directive'), + (".//p/em", 'Section author: Georg Brandl'), + (".//p/em", 'Module author: Georg Brandl'), + # created by the meta directive + (".//meta[@name='author'][@content='Me']", ''), + (".//meta[@name='keywords'][@content='docs, sphinx']", ''), + # a label created by ``.. 
_label:`` + (".//div[@id='label']", ''), + # code with standard code blocks + (".//pre", '^some code$'), + # an option list + (".//span[@class='option']", '--help'), + # admonitions + (".//p[@class='admonition-title']", 'My Admonition'), + (".//div[@class='admonition note']/p", 'Note text.'), + (".//div[@class='admonition warning']/p", 'Warning text.'), + # inline markup + (".//li/p/strong", r'^command\\n$'), + (".//li/p/strong", r'^program\\n$'), + (".//li/p/em", r'^dfn\\n$'), + (".//li/p/kbd", r'^kbd\\n$'), + (".//li/p/span", 'File \N{TRIANGULAR BULLET} Close'), + (".//li/p/code/span[@class='pre']", '^a/$'), + (".//li/p/code/em/span[@class='pre']", '^varpart$'), + (".//li/p/code/em/span[@class='pre']", '^i$'), + (".//a[@href='https://peps.python.org/pep-0008/']" + "[@class='pep reference external']/strong", 'PEP 8'), + (".//a[@href='https://peps.python.org/pep-0008/']" + "[@class='pep reference external']/strong", + 'Python Enhancement Proposal #8'), + (".//a[@href='https://datatracker.ietf.org/doc/html/rfc1.html']" + "[@class='rfc reference external']/strong", 'RFC 1'), + (".//a[@href='https://datatracker.ietf.org/doc/html/rfc1.html']" + "[@class='rfc reference external']/strong", 'Request for Comments #1'), + (".//a[@href='objects.html#envvar-HOME']" + "[@class='reference internal']/code/span[@class='pre']", 'HOME'), + (".//a[@href='#with']" + "[@class='reference internal']/code/span[@class='pre']", '^with$'), + (".//a[@href='#grammar-token-try_stmt']" + "[@class='reference internal']/code/span", '^statement$'), + (".//a[@href='#some-label'][@class='reference internal']/span", '^here$'), + (".//a[@href='#some-label'][@class='reference internal']/span", '^there$'), + (".//a[@href='subdir/includes.html']" + "[@class='reference internal']/span", 'Including in subdir'), + (".//a[@href='objects.html#cmdoption-python-c']" + "[@class='reference internal']/code/span[@class='pre']", '-c'), + # abbreviations + (".//abbr[@title='abbreviation']", '^abbr$'), + # version stuff + (".//div[@class='versionadded']/p/span", 'New in version 0.6: '), + (".//div[@class='versionadded']/p/span", + tail_check('First paragraph of versionadded')), + (".//div[@class='versionchanged']/p/span", + tail_check('First paragraph of versionchanged')), + (".//div[@class='versionchanged']/p", + 'Second paragraph of versionchanged'), + # footnote reference + (".//a[@class='footnote-reference brackets']", r'1'), + # created by reference lookup + (".//a[@href='index.html#ref1']", ''), + # ``seealso`` directive + (".//div/p[@class='admonition-title']", 'See also'), + # a ``hlist`` directive + (".//table[@class='hlist']/tbody/tr/td/ul/li/p", '^This$'), + # a ``centered`` directive + (".//p[@class='centered']/strong", 'LICENSE'), + # a glossary + (".//dl/dt[@id='term-boson']", 'boson'), + (".//dl/dt[@id='term-boson']/a", '¶'), + # a production list + (".//pre/strong", 'try_stmt'), + (".//pre/a[@href='#grammar-token-try1_stmt']/code/span", 'try1_stmt'), + # tests for ``only`` directive + (".//p", 'A global substitution!'), + (".//p", 'In HTML.'), + (".//p", 'In both.'), + (".//p", 'Always present'), + # tests for ``any`` role + (".//a[@href='#with']/span", 'headings'), + (".//a[@href='objects.html#func_without_body']/code/span", 'objects'), + # tests for numeric labels + (".//a[@href='#id1'][@class='reference internal']/span", 'Testing various markup'), + # tests for smartypants + (".//li/p", 'Smart “quotes” in English ‘text’.'), + (".//li/p", 'Smart — long and – short dashes.'), + (".//li/p", 'Ellipsis…'), + 
(".//li/p/code/span[@class='pre']", 'foo--"bar"...'), + (".//p", 'Этот «абзац» должен использовать „русские“ кавычки.'), + (".//p", 'Il dit : « C’est “super” ! »'), + ], + 'objects.html': [ + (".//dt[@id='mod.Cls.meth1']", ''), + (".//dt[@id='errmod.Error']", ''), + (".//dt/span[@class='sig-name descname']/span[@class='pre']", r'long\(parameter,'), + (".//dt/span[@class='sig-name descname']/span[@class='pre']", r'list\)'), + (".//dt/span[@class='sig-name descname']/span[@class='pre']", 'another'), + (".//dt/span[@class='sig-name descname']/span[@class='pre']", 'one'), + (".//a[@href='#mod.Cls'][@class='reference internal']", ''), + (".//dl[@class='std userdesc']", ''), + (".//dt[@id='userdesc-myobj']", ''), + (".//a[@href='#userdesc-myobj'][@class='reference internal']", ''), + # docfields + (".//a[@class='reference internal'][@href='#TimeInt']/em", 'TimeInt'), + (".//a[@class='reference internal'][@href='#Time']", 'Time'), + (".//a[@class='reference internal'][@href='#errmod.Error']/strong", 'Error'), + # C references + (".//span[@class='pre']", 'CFunction()'), + (".//a[@href='#c.Sphinx_DoSomething']", ''), + (".//a[@href='#c.SphinxStruct.member']", ''), + (".//a[@href='#c.SPHINX_USE_PYTHON']", ''), + (".//a[@href='#c.SphinxType']", ''), + (".//a[@href='#c.sphinx_global']", ''), + # test global TOC created by toctree() + (".//ul[@class='current']/li[@class='toctree-l1 current']/a[@href='#']", + 'Testing object descriptions'), + (".//li[@class='toctree-l1']/a[@href='markup.html']", + 'Testing various markup'), + # test unknown field names + (".//dt[@class='field-odd']", 'Field_name'), + (".//dt[@class='field-even']", 'Field_name all lower'), + (".//dt[@class='field-odd']", 'FIELD_NAME'), + (".//dt[@class='field-even']", 'FIELD_NAME ALL CAPS'), + (".//dt[@class='field-odd']", 'Field_Name'), + (".//dt[@class='field-even']", 'Field_Name All Word Caps'), + (".//dt[@class='field-odd']", 'Field_name'), + (".//dt[@class='field-even']", 'Field_name First word cap'), + (".//dt[@class='field-odd']", 'FIELd_name'), + (".//dt[@class='field-even']", 'FIELd_name PARTial caps'), + # custom sidebar + (".//h4", 'Custom sidebar'), + # docfields + (".//dd[@class='field-odd']/p/strong", '^moo$'), + (".//dd[@class='field-odd']/p/strong", tail_check(r'\(Moo\) .* Moo')), + (".//dd[@class='field-odd']/ul/li/p/strong", '^hour$'), + (".//dd[@class='field-odd']/ul/li/p/em", '^DuplicateType$'), + (".//dd[@class='field-odd']/ul/li/p/em", tail_check(r'.* Some parameter')), + # others + (".//a[@class='reference internal'][@href='#cmdoption-perl-arg-p']/code/span", + 'perl'), + (".//a[@class='reference internal'][@href='#cmdoption-perl-arg-p']/code/span", + '\\+p'), + (".//a[@class='reference internal'][@href='#cmdoption-perl-ObjC']/code/span", + '--ObjC\\+\\+'), + (".//a[@class='reference internal'][@href='#cmdoption-perl-plugin.option']/code/span", + '--plugin.option'), + (".//a[@class='reference internal'][@href='#cmdoption-perl-arg-create-auth-token']" + "/code/span", + 'create-auth-token'), + (".//a[@class='reference internal'][@href='#cmdoption-perl-arg-arg']/code/span", + 'arg'), + (".//a[@class='reference internal'][@href='#cmdoption-perl-j']/code/span", + '-j'), + (".//a[@class='reference internal'][@href='#cmdoption-hg-arg-commit']/code/span", + 'hg'), + (".//a[@class='reference internal'][@href='#cmdoption-hg-arg-commit']/code/span", + 'commit'), + (".//a[@class='reference internal'][@href='#cmdoption-git-commit-p']/code/span", + 'git'), + (".//a[@class='reference 
internal'][@href='#cmdoption-git-commit-p']/code/span", + 'commit'), + (".//a[@class='reference internal'][@href='#cmdoption-git-commit-p']/code/span", + '-p'), + ], + 'index.html': [ + (".//meta[@name='hc'][@content='hcval']", ''), + (".//meta[@name='hc_co'][@content='hcval_co']", ''), + (".//li[@class='toctree-l1']/a", 'Testing various markup'), + (".//li[@class='toctree-l2']/a", 'Inline markup'), + (".//title", 'Sphinx <Tests>'), + (".//div[@class='footer']", 'copyright text credits'), + (".//a[@href='http://python.org/']" + "[@class='reference external']", ''), + (".//li/p/a[@href='genindex.html']/span", 'Index'), + (".//li/p/a[@href='py-modindex.html']/span", 'Module Index'), + # custom sidebar only for contents + (".//h4", 'Contents sidebar'), + # custom JavaScript + (".//script[@src='file://moo.js']", ''), + # URL in contents + (".//a[@class='reference external'][@href='http://sphinx-doc.org/']", + 'http://sphinx-doc.org/'), + (".//a[@class='reference external'][@href='http://sphinx-doc.org/latest/']", + 'Latest reference'), + # Indirect hyperlink targets across files + (".//a[@href='markup.html#some-label'][@class='reference internal']/span", + '^indirect hyperref$'), + ], + 'bom.html': [ + (".//title", " File with UTF-8 BOM"), + ], + 'extensions.html': [ + (".//a[@href='http://python.org/dev/']", "http://python.org/dev/"), + (".//a[@href='http://bugs.python.org/issue1000']", "issue 1000"), + (".//a[@href='http://bugs.python.org/issue1042']", "explicit caption"), + ], + 'genindex.html': [ + # index entries + (".//a/strong", "Main"), + (".//a/strong", "[1]"), + (".//a/strong", "Other"), + (".//a", "entry"), + (".//li/a", "double"), + ], + 'otherext.html': [ + (".//h1", "Generated section"), + (".//a[@href='_sources/otherext.foo.txt']", ''), + ], +})) +@pytest.mark.sphinx('html', tags=['testtag'], + confoverrides={'html_context.hckey_co': 'hcval_co'}) +@pytest.mark.test_params(shared_result='test_build_html_output') +def test_html5_output(app, cached_etree_parse, fname, expect): + app.build() + print(app.outdir / fname) + check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect) + + +@pytest.mark.parametrize(("fname", "expect"), flat_dict({ + 'index.html': [ + (".//div[@class='citation']/span", r'Ref1'), + (".//div[@class='citation']/span", r'Ref_1'), + ], + 'footnote.html': [ + (".//a[@class='footnote-reference brackets'][@href='#id9'][@id='id1']", r"1"), + (".//a[@class='footnote-reference brackets'][@href='#id10'][@id='id2']", r"2"), + (".//a[@class='footnote-reference brackets'][@href='#foo'][@id='id3']", r"3"), + (".//a[@class='reference internal'][@href='#bar'][@id='id4']/span", r"\[bar\]"), + (".//a[@class='reference internal'][@href='#baz-qux'][@id='id5']/span", r"\[baz_qux\]"), + (".//a[@class='footnote-reference brackets'][@href='#id11'][@id='id6']", r"4"), + (".//a[@class='footnote-reference brackets'][@href='#id12'][@id='id7']", r"5"), + (".//aside[@class='footnote brackets']/span/a[@href='#id1']", r"1"), + (".//aside[@class='footnote brackets']/span/a[@href='#id2']", r"2"), + (".//aside[@class='footnote brackets']/span/a[@href='#id3']", r"3"), + (".//div[@class='citation']/span/a[@href='#id4']", r"bar"), + (".//div[@class='citation']/span/a[@href='#id5']", r"baz_qux"), + (".//aside[@class='footnote brackets']/span/a[@href='#id6']", r"4"), + (".//aside[@class='footnote brackets']/span/a[@href='#id7']", r"5"), + (".//aside[@class='footnote brackets']/span/a[@href='#id8']", r"6"), + ], +})) +@pytest.mark.sphinx('html') 
+@pytest.mark.test_params(shared_result='test_build_html_output_docutils18') +def test_docutils_output(app, cached_etree_parse, fname, expect): + app.build() + print(app.outdir / fname) + check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect) + + +@pytest.mark.sphinx('html', parallel=2) +def test_html_parallel(app): + app.build() + + +@pytest.mark.sphinx('html') +@pytest.mark.test_params(shared_result='test_build_html_output') +def test_html_download(app): + app.build() + + # subdir/includes.html + result = (app.outdir / 'subdir' / 'includes.html').read_text(encoding='utf8') + pattern = ('<a class="reference download internal" download="" ' + 'href="../(_downloads/.*/img.png)">') + matched = re.search(pattern, result) + assert matched + assert (app.outdir / matched.group(1)).exists() + filename = matched.group(1) + + # includes.html + result = (app.outdir / 'includes.html').read_text(encoding='utf8') + pattern = ('<a class="reference download internal" download="" ' + 'href="(_downloads/.*/img.png)">') + matched = re.search(pattern, result) + assert matched + assert (app.outdir / matched.group(1)).exists() + assert matched.group(1) == filename + + pattern = ('<a class="reference download internal" download="" ' + 'href="(_downloads/.*/)(file_with_special_%23_chars.xyz)">') + matched = re.search(pattern, result) + assert matched + assert (app.outdir / matched.group(1) / "file_with_special_#_chars.xyz").exists() + + +@pytest.mark.sphinx('html', testroot='roles-download') +def test_html_download_role(app, status, warning): + app.build() + digest = hashlib.md5(b'dummy.dat', usedforsecurity=False).hexdigest() + assert (app.outdir / '_downloads' / digest / 'dummy.dat').exists() + digest_another = hashlib.md5(b'another/dummy.dat', usedforsecurity=False).hexdigest() + assert (app.outdir / '_downloads' / digest_another / 'dummy.dat').exists() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert (('<li><p><a class="reference download internal" download="" ' + 'href="_downloads/%s/dummy.dat">' + '<code class="xref download docutils literal notranslate">' + '<span class="pre">dummy.dat</span></code></a></p></li>' % digest) + in content) + assert (('<li><p><a class="reference download internal" download="" ' + 'href="_downloads/%s/dummy.dat">' + '<code class="xref download docutils literal notranslate">' + '<span class="pre">another/dummy.dat</span></code></a></p></li>' % + digest_another) in content) + assert ('<li><p><code class="xref download docutils literal notranslate">' + '<span class="pre">not_found.dat</span></code></p></li>' in content) + assert ('<li><p><a class="reference download external" download="" ' + 'href="http://www.sphinx-doc.org/en/master/_static/sphinxheader.png">' + '<code class="xref download docutils literal notranslate">' + '<span class="pre">Sphinx</span> <span class="pre">logo</span>' + '</code></a></p></li>' in content) + + +@pytest.mark.sphinx('html', testroot='build-html-translator') +def test_html_translator(app): + app.build() + assert app.builder.docwriter.visitor.depart_with_node == 10 + + +@pytest.mark.parametrize(("fname", "expect"), flat_dict({ + 'index.html': [ + (".//li[@class='toctree-l3']/a", '1.1.1. Foo A1', True), + (".//li[@class='toctree-l3']/a", '1.2.1. Foo B1', True), + (".//li[@class='toctree-l3']/a", '2.1.1. Bar A1', False), + (".//li[@class='toctree-l3']/a", '2.2.1. 
Bar B1', False), + ], + 'foo.html': [ + (".//h1", 'Foo', True), + (".//h2", 'Foo A', True), + (".//h3", 'Foo A1', True), + (".//h2", 'Foo B', True), + (".//h3", 'Foo B1', True), + + (".//h1//span[@class='section-number']", '1. ', True), + (".//h2//span[@class='section-number']", '1.1. ', True), + (".//h3//span[@class='section-number']", '1.1.1. ', True), + (".//h2//span[@class='section-number']", '1.2. ', True), + (".//h3//span[@class='section-number']", '1.2.1. ', True), + + (".//div[@class='sphinxsidebarwrapper']//li/a", '1.1. Foo A', True), + (".//div[@class='sphinxsidebarwrapper']//li/a", '1.1.1. Foo A1', True), + (".//div[@class='sphinxsidebarwrapper']//li/a", '1.2. Foo B', True), + (".//div[@class='sphinxsidebarwrapper']//li/a", '1.2.1. Foo B1', True), + ], + 'bar.html': [ + (".//h1", 'Bar', True), + (".//h2", 'Bar A', True), + (".//h2", 'Bar B', True), + (".//h3", 'Bar B1', True), + (".//h1//span[@class='section-number']", '2. ', True), + (".//h2//span[@class='section-number']", '2.1. ', True), + (".//h2//span[@class='section-number']", '2.2. ', True), + (".//h3//span[@class='section-number']", '2.2.1. ', True), + (".//div[@class='sphinxsidebarwrapper']//li/a", '2. Bar', True), + (".//div[@class='sphinxsidebarwrapper']//li/a", '2.1. Bar A', True), + (".//div[@class='sphinxsidebarwrapper']//li/a", '2.2. Bar B', True), + (".//div[@class='sphinxsidebarwrapper']//li/a", '2.2.1. Bar B1', False), + ], + 'baz.html': [ + (".//h1", 'Baz A', True), + (".//h1//span[@class='section-number']", '2.1.1. ', True), + ], +})) +@pytest.mark.sphinx('html', testroot='tocdepth') +@pytest.mark.test_params(shared_result='test_build_html_tocdepth') +def test_tocdepth(app, cached_etree_parse, fname, expect): + app.build() + # issue #1251 + check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect) + + +@pytest.mark.parametrize(("fname", "expect"), flat_dict({ + 'index.html': [ + (".//li[@class='toctree-l3']/a", '1.1.1. Foo A1', True), + (".//li[@class='toctree-l3']/a", '1.2.1. Foo B1', True), + (".//li[@class='toctree-l3']/a", '2.1.1. Bar A1', False), + (".//li[@class='toctree-l3']/a", '2.2.1. Bar B1', False), + + # index.rst + (".//h1", 'test-tocdepth', True), + + # foo.rst + (".//h2", 'Foo', True), + (".//h3", 'Foo A', True), + (".//h4", 'Foo A1', True), + (".//h3", 'Foo B', True), + (".//h4", 'Foo B1', True), + (".//h2//span[@class='section-number']", '1. ', True), + (".//h3//span[@class='section-number']", '1.1. ', True), + (".//h4//span[@class='section-number']", '1.1.1. ', True), + (".//h3//span[@class='section-number']", '1.2. ', True), + (".//h4//span[@class='section-number']", '1.2.1. ', True), + + # bar.rst + (".//h2", 'Bar', True), + (".//h3", 'Bar A', True), + (".//h3", 'Bar B', True), + (".//h4", 'Bar B1', True), + (".//h2//span[@class='section-number']", '2. ', True), + (".//h3//span[@class='section-number']", '2.1. ', True), + (".//h3//span[@class='section-number']", '2.2. ', True), + (".//h4//span[@class='section-number']", '2.2.1. ', True), + + # baz.rst + (".//h4", 'Baz A', True), + (".//h4//span[@class='section-number']", '2.1.1. 
', True), + ], +})) +@pytest.mark.sphinx('singlehtml', testroot='tocdepth') +@pytest.mark.test_params(shared_result='test_build_html_tocdepth') +def test_tocdepth_singlehtml(app, cached_etree_parse, fname, expect): + app.build() + check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect) + + +@pytest.mark.sphinx('html', testroot='numfig') +@pytest.mark.test_params(shared_result='test_build_html_numfig') +def test_numfig_disabled_warn(app, warning): + app.build() + warnings = warning.getvalue() + assert 'index.rst:47: WARNING: numfig is disabled. :numref: is ignored.' in warnings + assert 'index.rst:56: WARNING: invalid numfig_format: invalid' not in warnings + assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' not in warnings + + +@pytest.mark.parametrize(("fname", "expect"), flat_dict({ + 'index.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", None, True), + (".//table/caption/span[@class='caption-number']", None, True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", None, True), + (".//li/p/code/span", '^fig1$', True), + (".//li/p/code/span", '^Figure%s$', True), + (".//li/p/code/span", '^table-1$', True), + (".//li/p/code/span", '^Table:%s$', True), + (".//li/p/code/span", '^CODE_1$', True), + (".//li/p/code/span", '^Code-%s$', True), + (".//li/p/a/span", '^Section 1$', True), + (".//li/p/a/span", '^Section 2.1$', True), + (".//li/p/code/span", '^Fig.{number}$', True), + (".//li/p/a/span", '^Sect.1 Foo$', True), + ], + 'foo.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", None, True), + (".//table/caption/span[@class='caption-number']", None, True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", None, True), + ], + 'bar.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", None, True), + (".//table/caption/span[@class='caption-number']", None, True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", None, True), + ], + 'baz.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", None, True), + (".//table/caption/span[@class='caption-number']", None, True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", None, True), + ], +})) +@pytest.mark.sphinx('html', testroot='numfig') +@pytest.mark.test_params(shared_result='test_build_html_numfig') +def test_numfig_disabled(app, cached_etree_parse, fname, expect): + app.build() + check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect) + + +@pytest.mark.sphinx( + 'html', testroot='numfig', + srcdir='test_numfig_without_numbered_toctree_warn', + confoverrides={'numfig': True}) +def test_numfig_without_numbered_toctree_warn(app, warning): + app.build() + # remove :numbered: option + index = (app.srcdir / 'index.rst').read_text(encoding='utf8') + index = re.sub(':numbered:.*', '', index) + (app.srcdir / 'index.rst').write_text(index, encoding='utf8') + app.build() + + warnings = warning.getvalue() + assert 'index.rst:47: WARNING: numfig is disabled. :numref: is ignored.' not in warnings + assert 'index.rst:55: WARNING: Failed to create a cross reference. Any number is not assigned: index' in warnings + assert 'index.rst:56: WARNING: invalid numfig_format: invalid' in warnings + assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' in warnings + + +@pytest.mark.parametrize(("fname", "expect"), flat_dict({ + 'index.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 
9 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 10 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 9 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 10 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 9 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 10 $', True), + (".//li/p/a/span", '^Fig. 9$', True), + (".//li/p/a/span", '^Figure6$', True), + (".//li/p/a/span", '^Table 9$', True), + (".//li/p/a/span", '^Table:6$', True), + (".//li/p/a/span", '^Listing 9$', True), + (".//li/p/a/span", '^Code-6$', True), + (".//li/p/code/span", '^foo$', True), + (".//li/p/code/span", '^bar_a$', True), + (".//li/p/a/span", '^Fig.9 should be Fig.1$', True), + (".//li/p/code/span", '^Sect.{number}$', True), + ], + 'foo.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 2 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 3 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 4 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 3 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 4 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 3 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 4 $', True), + ], + 'bar.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 5 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 7 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 8 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 5 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 7 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 8 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 5 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 7 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 8 $', True), + ], + 'baz.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 
6 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 6 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 6 $', True), + ], +})) +@pytest.mark.sphinx( + 'html', testroot='numfig', + srcdir='test_numfig_without_numbered_toctree', + confoverrides={'numfig': True}) +def test_numfig_without_numbered_toctree(app, cached_etree_parse, fname, expect): + # remove :numbered: option + index = (app.srcdir / 'index.rst').read_text(encoding='utf8') + index = re.sub(':numbered:.*', '', index) + (app.srcdir / 'index.rst').write_text(index, encoding='utf8') + + if not os.listdir(app.outdir): + app.build() + check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect) + + +@pytest.mark.sphinx('html', testroot='numfig', confoverrides={'numfig': True}) +@pytest.mark.test_params(shared_result='test_build_html_numfig_on') +def test_numfig_with_numbered_toctree_warn(app, warning): + app.build() + warnings = warning.getvalue() + assert 'index.rst:47: WARNING: numfig is disabled. :numref: is ignored.' not in warnings + assert 'index.rst:55: WARNING: Failed to create a cross reference. Any number is not assigned: index' in warnings + assert 'index.rst:56: WARNING: invalid numfig_format: invalid' in warnings + assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' in warnings + + +@pytest.mark.parametrize(("fname", "expect"), flat_dict({ + 'index.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 2 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2 $', True), + (".//li/p/a/span", '^Fig. 1$', True), + (".//li/p/a/span", '^Figure2.2$', True), + (".//li/p/a/span", '^Table 1$', True), + (".//li/p/a/span", '^Table:2.2$', True), + (".//li/p/a/span", '^Listing 1$', True), + (".//li/p/a/span", '^Code-2.2$', True), + (".//li/p/a/span", '^Section.1$', True), + (".//li/p/a/span", '^Section.2.1$', True), + (".//li/p/a/span", '^Fig.1 should be Fig.1$', True), + (".//li/p/a/span", '^Sect.1 Foo$', True), + ], + 'foo.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1.1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1.2 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1.3 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1.4 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1.1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1.2 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1.3 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1.4 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1.1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1.2 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1.3 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1.4 $', True), + ], + 'bar.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 
2.1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 2.3 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 2.4 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2.1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2.3 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2.4 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2.1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2.3 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2.4 $', True), + ], + 'baz.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 2.2 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2.2 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2.2 $', True), + ], +})) +@pytest.mark.sphinx('html', testroot='numfig', confoverrides={'numfig': True}) +@pytest.mark.test_params(shared_result='test_build_html_numfig_on') +def test_numfig_with_numbered_toctree(app, cached_etree_parse, fname, expect): + app.build() + check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect) + + +@pytest.mark.sphinx('html', testroot='numfig', confoverrides={ + 'numfig': True, + 'numfig_format': {'figure': 'Figure:%s', + 'table': 'Tab_%s', + 'code-block': 'Code-%s', + 'section': 'SECTION-%s'}}) +@pytest.mark.test_params(shared_result='test_build_html_numfig_format_warn') +def test_numfig_with_prefix_warn(app, warning): + app.build() + warnings = warning.getvalue() + assert 'index.rst:47: WARNING: numfig is disabled. :numref: is ignored.' not in warnings + assert 'index.rst:55: WARNING: Failed to create a cross reference. 
Any number is not assigned: index' in warnings + assert 'index.rst:56: WARNING: invalid numfig_format: invalid' in warnings + assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' in warnings + + +@pytest.mark.parametrize(("fname", "expect"), flat_dict({ + 'index.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Figure:1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Figure:2 $', True), + (".//table/caption/span[@class='caption-number']", + '^Tab_1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Tab_2 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Code-1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Code-2 $', True), + (".//li/p/a/span", '^Figure:1$', True), + (".//li/p/a/span", '^Figure2.2$', True), + (".//li/p/a/span", '^Tab_1$', True), + (".//li/p/a/span", '^Table:2.2$', True), + (".//li/p/a/span", '^Code-1$', True), + (".//li/p/a/span", '^Code-2.2$', True), + (".//li/p/a/span", '^SECTION-1$', True), + (".//li/p/a/span", '^SECTION-2.1$', True), + (".//li/p/a/span", '^Fig.1 should be Fig.1$', True), + (".//li/p/a/span", '^Sect.1 Foo$', True), + ], + 'foo.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Figure:1.1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Figure:1.2 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Figure:1.3 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Figure:1.4 $', True), + (".//table/caption/span[@class='caption-number']", + '^Tab_1.1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Tab_1.2 $', True), + (".//table/caption/span[@class='caption-number']", + '^Tab_1.3 $', True), + (".//table/caption/span[@class='caption-number']", + '^Tab_1.4 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Code-1.1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Code-1.2 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Code-1.3 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Code-1.4 $', True), + ], + 'bar.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Figure:2.1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Figure:2.3 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Figure:2.4 $', True), + (".//table/caption/span[@class='caption-number']", + '^Tab_2.1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Tab_2.3 $', True), + (".//table/caption/span[@class='caption-number']", + '^Tab_2.4 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Code-2.1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Code-2.3 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Code-2.4 $', True), + ], + 'baz.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Figure:2.2 $', True), + (".//table/caption/span[@class='caption-number']", + '^Tab_2.2 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Code-2.2 $', True), + ], +})) +@pytest.mark.sphinx('html', testroot='numfig', + confoverrides={'numfig': True, + 'numfig_format': {'figure': 'Figure:%s', + 'table': 'Tab_%s', + 'code-block': 'Code-%s', + 'section': 'SECTION-%s'}}) 
+@pytest.mark.test_params(shared_result='test_build_html_numfig_format_warn') +def test_numfig_with_prefix(app, cached_etree_parse, fname, expect): + app.build() + check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect) + + +@pytest.mark.sphinx('html', testroot='numfig', + confoverrides={'numfig': True, 'numfig_secnum_depth': 2}) +@pytest.mark.test_params(shared_result='test_build_html_numfig_depth_2') +def test_numfig_with_secnum_depth_warn(app, warning): + app.build() + warnings = warning.getvalue() + assert 'index.rst:47: WARNING: numfig is disabled. :numref: is ignored.' not in warnings + assert 'index.rst:55: WARNING: Failed to create a cross reference. Any number is not assigned: index' in warnings + assert 'index.rst:56: WARNING: invalid numfig_format: invalid' in warnings + assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' in warnings + + +@pytest.mark.parametrize(("fname", "expect"), flat_dict({ + 'index.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 2 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2 $', True), + (".//li/p/a/span", '^Fig. 1$', True), + (".//li/p/a/span", '^Figure2.1.2$', True), + (".//li/p/a/span", '^Table 1$', True), + (".//li/p/a/span", '^Table:2.1.2$', True), + (".//li/p/a/span", '^Listing 1$', True), + (".//li/p/a/span", '^Code-2.1.2$', True), + (".//li/p/a/span", '^Section.1$', True), + (".//li/p/a/span", '^Section.2.1$', True), + (".//li/p/a/span", '^Fig.1 should be Fig.1$', True), + (".//li/p/a/span", '^Sect.1 Foo$', True), + ], + 'foo.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1.1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1.1.1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1.1.2 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1.2.1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1.1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1.1.1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1.1.2 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1.2.1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1.1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1.1.1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1.1.2 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1.2.1 $', True), + ], + 'bar.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 2.1.1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 2.1.3 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 
2.2.1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2.1.1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2.1.3 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2.2.1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2.1.1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2.1.3 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2.2.1 $', True), + ], + 'baz.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 2.1.2 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2.1.2 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2.1.2 $', True), + ], +})) +@pytest.mark.sphinx('html', testroot='numfig', + confoverrides={'numfig': True, + 'numfig_secnum_depth': 2}) +@pytest.mark.test_params(shared_result='test_build_html_numfig_depth_2') +def test_numfig_with_secnum_depth(app, cached_etree_parse, fname, expect): + app.build() + check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect) + + +@pytest.mark.parametrize(("fname", "expect"), flat_dict({ + 'index.html': [ + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 2 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2 $', True), + (".//li/p/a/span", '^Fig. 1$', True), + (".//li/p/a/span", '^Figure2.2$', True), + (".//li/p/a/span", '^Table 1$', True), + (".//li/p/a/span", '^Table:2.2$', True), + (".//li/p/a/span", '^Listing 1$', True), + (".//li/p/a/span", '^Code-2.2$', True), + (".//li/p/a/span", '^Section.1$', True), + (".//li/p/a/span", '^Section.2.1$', True), + (".//li/p/a/span", '^Fig.1 should be Fig.1$', True), + (".//li/p/a/span", '^Sect.1 Foo$', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1.1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1.2 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1.3 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 1.4 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1.1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1.2 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1.3 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 1.4 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1.1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1.2 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1.3 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 1.4 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 2.1 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 2.3 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 
2.4 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2.1 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2.3 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2.4 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2.1 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2.3 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2.4 $', True), + (FIGURE_CAPTION + "/span[@class='caption-number']", '^Fig. 2.2 $', True), + (".//table/caption/span[@class='caption-number']", + '^Table 2.2 $', True), + (".//div[@class='code-block-caption']/" + "span[@class='caption-number']", '^Listing 2.2 $', True), + ], +})) +@pytest.mark.sphinx('singlehtml', testroot='numfig', confoverrides={'numfig': True}) +@pytest.mark.test_params(shared_result='test_build_html_numfig_on') +def test_numfig_with_singlehtml(app, cached_etree_parse, fname, expect): + app.build() + check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect) + + +@pytest.mark.parametrize(("fname", "expect"), flat_dict({ + 'index.html': [ + (FIGURE_CAPTION + "//span[@class='caption-number']", "Fig. 1", True), + (FIGURE_CAPTION + "//span[@class='caption-number']", "Fig. 2", True), + (FIGURE_CAPTION + "//span[@class='caption-number']", "Fig. 3", True), + (".//div//span[@class='caption-number']", "No.1 ", True), + (".//div//span[@class='caption-number']", "No.2 ", True), + (".//li/p/a/span", 'Fig. 1', True), + (".//li/p/a/span", 'Fig. 2', True), + (".//li/p/a/span", 'Fig. 3', True), + (".//li/p/a/span", 'No.1', True), + (".//li/p/a/span", 'No.2', True), + ], +})) +@pytest.mark.sphinx('html', testroot='add_enumerable_node', + srcdir='test_enumerable_node') +def test_enumerable_node(app, cached_etree_parse, fname, expect): + app.build() + check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect) + + +@pytest.mark.sphinx('html', testroot='html_assets') +def test_html_assets(app): + app.builder.build_all() + + # exclude_path and its family + assert not (app.outdir / 'static' / 'index.html').exists() + assert not (app.outdir / 'extra' / 'index.html').exists() + + # html_static_path + assert not (app.outdir / '_static' / '.htaccess').exists() + assert not (app.outdir / '_static' / '.htpasswd').exists() + assert (app.outdir / '_static' / 'API.html').exists() + assert (app.outdir / '_static' / 'API.html').read_text(encoding='utf8') == 'Sphinx-1.4.4' + assert (app.outdir / '_static' / 'css' / 'style.css').exists() + assert (app.outdir / '_static' / 'js' / 'custom.js').exists() + assert (app.outdir / '_static' / 'rimg.png').exists() + assert not (app.outdir / '_static' / '_build' / 'index.html').exists() + assert (app.outdir / '_static' / 'background.png').exists() + assert not (app.outdir / '_static' / 'subdir' / '.htaccess').exists() + assert not (app.outdir / '_static' / 'subdir' / '.htpasswd').exists() + + # html_extra_path + assert (app.outdir / '.htaccess').exists() + assert not (app.outdir / '.htpasswd').exists() + assert (app.outdir / 'API.html_t').exists() + assert (app.outdir / 'css/style.css').exists() + assert (app.outdir / 'rimg.png').exists() + assert not (app.outdir / '_build' / 'index.html').exists() + assert (app.outdir / 'background.png').exists() + assert (app.outdir / 'subdir' / '.htaccess').exists() + assert not (app.outdir / 'subdir' / '.htpasswd').exists() + + # html_css_files + content = (app.outdir / 
'index.html').read_text(encoding='utf8') + assert '<link rel="stylesheet" type="text/css" href="_static/css/style.css" />' in content + assert ('<link media="print" rel="stylesheet" title="title" type="text/css" ' + 'href="https://example.com/custom.css" />' in content) + + # html_js_files + assert '<script src="_static/js/custom.js"></script>' in content + assert ('<script async="async" src="https://example.com/script.js">' + '</script>' in content) + + +@pytest.mark.sphinx('html', testroot='html_assets') +def test_assets_order(app, monkeypatch): + monkeypatch.setattr(sphinx.builders.html, '_file_checksum', lambda o, f: '') + + app.add_css_file('normal.css') + app.add_css_file('early.css', priority=100) + app.add_css_file('late.css', priority=750) + app.add_css_file('lazy.css', priority=900) + app.add_js_file('normal.js') + app.add_js_file('early.js', priority=100) + app.add_js_file('late.js', priority=750) + app.add_js_file('lazy.js', priority=900) + + app.builder.build_all() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + + # css_files + expected = [ + '_static/early.css', + '_static/pygments.css', + '_static/alabaster.css', + 'https://example.com/custom.css', + '_static/normal.css', + '_static/late.css', + '_static/css/style.css', + '_static/lazy.css', + ] + pattern = '.*'.join(f'href="{re.escape(f)}"' for f in expected) + assert re.search(pattern, content, re.DOTALL), content + + # js_files + expected = [ + '_static/early.js', + '_static/doctools.js', + '_static/sphinx_highlight.js', + 'https://example.com/script.js', + '_static/normal.js', + '_static/late.js', + '_static/js/custom.js', + '_static/lazy.js', + ] + pattern = '.*'.join(f'src="{re.escape(f)}"' for f in expected) + assert re.search(pattern, content, re.DOTALL), content + + +@pytest.mark.sphinx('html', testroot='html_file_checksum') +def test_file_checksum(app): + app.add_css_file('stylesheet-a.css') + app.add_css_file('stylesheet-b.css') + app.add_css_file('https://example.com/custom.css') + app.add_js_file('script.js') + app.add_js_file('empty.js') + app.add_js_file('https://example.com/script.js') + + app.builder.build_all() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + + # checksum for local files + assert '<link rel="stylesheet" type="text/css" href="_static/stylesheet-a.css?v=e575b6df" />' in content + assert '<link rel="stylesheet" type="text/css" href="_static/stylesheet-b.css?v=a2d5cc0f" />' in content + assert '<script src="_static/script.js?v=48278d48"></script>' in content + + # empty files have no checksum + assert '<script src="_static/empty.js"></script>' in content + + # no checksum for hyperlinks + assert '<link rel="stylesheet" type="text/css" href="https://example.com/custom.css" />' in content + assert '<script src="https://example.com/script.js"></script>' in content + + +def test_file_checksum_query_string(): + with pytest.raises(ThemeError, match='Local asset file paths must not contain query strings'): + _file_checksum(Path(), 'with_query_string.css?dead_parrots=1') + + with pytest.raises(ThemeError, match='Local asset file paths must not contain query strings'): + _file_checksum(Path(), 'with_query_string.js?dead_parrots=1') + + with pytest.raises(ThemeError, match='Local asset file paths must not contain query strings'): + _file_checksum(Path.cwd(), '_static/with_query_string.css?dead_parrots=1') + + with pytest.raises(ThemeError, match='Local asset file paths must not contain query strings'): + _file_checksum(Path.cwd(), 
'_static/with_query_string.js?dead_parrots=1') + + +@pytest.mark.sphinx('html', testroot='html_assets') +def test_javscript_loading_method(app): + app.add_js_file('normal.js') + app.add_js_file('early.js', loading_method='async') + app.add_js_file('late.js', loading_method='defer') + + app.builder.build_all() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + + assert '<script src="_static/normal.js"></script>' in content + assert '<script async="async" src="_static/early.js"></script>' in content + assert '<script defer="defer" src="_static/late.js"></script>' in content + + +@pytest.mark.sphinx('html', testroot='basic', confoverrides={'html_copy_source': False}) +def test_html_copy_source(app): + app.builder.build_all() + assert not (app.outdir / '_sources' / 'index.rst.txt').exists() + + +@pytest.mark.sphinx('html', testroot='basic', confoverrides={'html_sourcelink_suffix': '.txt'}) +def test_html_sourcelink_suffix(app): + app.builder.build_all() + assert (app.outdir / '_sources' / 'index.rst.txt').exists() + + +@pytest.mark.sphinx('html', testroot='basic', confoverrides={'html_sourcelink_suffix': '.rst'}) +def test_html_sourcelink_suffix_same(app): + app.builder.build_all() + assert (app.outdir / '_sources' / 'index.rst').exists() + + +@pytest.mark.sphinx('html', testroot='basic', confoverrides={'html_sourcelink_suffix': ''}) +def test_html_sourcelink_suffix_empty(app): + app.builder.build_all() + assert (app.outdir / '_sources' / 'index.rst').exists() + + +@pytest.mark.sphinx('html', testroot='html_entity') +def test_html_entity(app): + app.builder.build_all() + valid_entities = {'amp', 'lt', 'gt', 'quot', 'apos'} + content = (app.outdir / 'index.html').read_text(encoding='utf8') + for entity in re.findall(r'&([a-z]+);', content, re.M): + assert entity not in valid_entities + + +@pytest.mark.sphinx('html', testroot='basic') +def test_html_inventory(app): + app.builder.build_all() + + with app.outdir.joinpath('objects.inv').open('rb') as f: + invdata = InventoryFile.load(f, 'https://www.google.com', posixpath.join) + + assert set(invdata.keys()) == {'std:label', 'std:doc'} + assert set(invdata['std:label'].keys()) == {'modindex', + 'py-modindex', + 'genindex', + 'search'} + assert invdata['std:label']['modindex'] == ('Python', + '', + 'https://www.google.com/py-modindex.html', + 'Module Index') + assert invdata['std:label']['py-modindex'] == ('Python', + '', + 'https://www.google.com/py-modindex.html', + 'Python Module Index') + assert invdata['std:label']['genindex'] == ('Python', + '', + 'https://www.google.com/genindex.html', + 'Index') + assert invdata['std:label']['search'] == ('Python', + '', + 'https://www.google.com/search.html', + 'Search Page') + assert set(invdata['std:doc'].keys()) == {'index'} + assert invdata['std:doc']['index'] == ('Python', + '', + 'https://www.google.com/index.html', + 'The basic Sphinx documentation for testing') + + +@pytest.mark.sphinx('html', testroot='images', confoverrides={'html_sourcelink_suffix': ''}) +def test_html_anchor_for_figure(app): + app.builder.build_all() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<figcaption>\n<p><span class="caption-text">The caption of pic</span>' + '<a class="headerlink" href="#id1" title="Link to this image">¶</a></p>\n</figcaption>' + in content) + + +@pytest.mark.sphinx('html', testroot='directives-raw') +def test_html_raw_directive(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'index.html').read_text(encoding='utf8') + + # 
standard case + assert 'standalone raw directive (HTML)' in result + assert 'standalone raw directive (LaTeX)' not in result + + # with substitution + assert '<p>HTML: abc def ghi</p>' in result + assert '<p>LaTeX: abc ghi</p>' in result + + +@pytest.mark.parametrize(("fname", "expect"), flat_dict({ + 'index.html': [ + (".//link[@href='_static/persistent.css']" + "[@rel='stylesheet']", '', True), + (".//link[@href='_static/default.css']" + "[@rel='stylesheet']" + "[@title='Default']", '', True), + (".//link[@href='_static/alternate1.css']" + "[@rel='alternate stylesheet']" + "[@title='Alternate']", '', True), + (".//link[@href='_static/alternate2.css']" + "[@rel='alternate stylesheet']", '', True), + (".//link[@href='_static/more_persistent.css']" + "[@rel='stylesheet']", '', True), + (".//link[@href='_static/more_default.css']" + "[@rel='stylesheet']" + "[@title='Default']", '', True), + (".//link[@href='_static/more_alternate1.css']" + "[@rel='alternate stylesheet']" + "[@title='Alternate']", '', True), + (".//link[@href='_static/more_alternate2.css']" + "[@rel='alternate stylesheet']", '', True), + ], +})) +@pytest.mark.sphinx('html', testroot='stylesheets') +def test_alternate_stylesheets(app, cached_etree_parse, fname, expect): + app.build() + check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect) + + +@pytest.mark.sphinx('html', testroot='html_style') +def test_html_style(app, status, warning): + app.build() + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<link rel="stylesheet" type="text/css" href="_static/default.css" />' in result + assert ('<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />' + not in result) + + +@pytest.mark.sphinx('html', testroot='images') +def test_html_remote_images(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<img alt="https://www.python.org/static/img/python-logo.png" ' + 'src="https://www.python.org/static/img/python-logo.png" />' in result) + assert not (app.outdir / 'python-logo.png').exists() + + +@pytest.mark.sphinx('html', testroot='image-escape') +def test_html_encoded_image(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<img alt="_images/img_%231.png" src="_images/img_%231.png" />' in result) + assert (app.outdir / '_images/img_#1.png').exists() + + +@pytest.mark.sphinx('html', testroot='remote-logo') +def test_html_remote_logo(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<img class="logo" src="https://www.python.org/static/img/python-logo.png" alt="Logo"/>' in result) + assert ('<link rel="icon" href="https://www.python.org/static/favicon.ico"/>' in result) + assert not (app.outdir / 'python-logo.png').exists() + + +@pytest.mark.sphinx('html', testroot='local-logo') +def test_html_local_logo(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<img class="logo" src="_static/img.png" alt="Logo"/>' in result) + assert (app.outdir / '_static/img.png').exists() + + +@pytest.mark.sphinx('html', testroot='basic') +def test_html_sidebar(app, status, warning): + ctx = {} + + # default for alabaster + app.builder.build_all() + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<div class="sphinxsidebar" role="navigation" ' + 'aria-label="main navigation">' 
in result) + assert '<h1 class="logo"><a href="#">Python</a></h1>' in result + assert '<h3>Navigation</h3>' in result + assert '<h3>Related Topics</h3>' in result + assert '<h3 id="searchlabel">Quick search</h3>' in result + + app.builder.add_sidebars('index', ctx) + assert ctx['sidebars'] == ['about.html', 'navigation.html', 'relations.html', + 'searchbox.html', 'donate.html'] + + # only relations.html + app.config.html_sidebars = {'**': ['relations.html']} + app.builder.build_all() + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<div class="sphinxsidebar" role="navigation" ' + 'aria-label="main navigation">' in result) + assert '<h1 class="logo"><a href="#">Python</a></h1>' not in result + assert '<h3>Navigation</h3>' not in result + assert '<h3>Related Topics</h3>' in result + assert '<h3 id="searchlabel">Quick search</h3>' not in result + + app.builder.add_sidebars('index', ctx) + assert ctx['sidebars'] == ['relations.html'] + + # no sidebars + app.config.html_sidebars = {'**': []} + app.builder.build_all() + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<div class="sphinxsidebar" role="navigation" ' + 'aria-label="main navigation">' not in result) + assert '<h1 class="logo"><a href="#">Python</a></h1>' not in result + assert '<h3>Navigation</h3>' not in result + assert '<h3>Related Topics</h3>' not in result + assert '<h3 id="searchlabel">Quick search</h3>' not in result + + app.builder.add_sidebars('index', ctx) + assert ctx['sidebars'] == [] + + +@pytest.mark.parametrize(("fname", "expect"), flat_dict({ + 'index.html': [(".//em/a[@href='https://example.com/man.1']", "", True), + (".//em/a[@href='https://example.com/ls.1']", "", True), + (".//em/a[@href='https://example.com/sphinx.']", "", True)], + +})) +@pytest.mark.sphinx('html', testroot='manpage_url', confoverrides={ + 'manpages_url': 'https://example.com/{page}.{section}'}) +@pytest.mark.test_params(shared_result='test_build_html_manpage_url') +def test_html_manpage(app, cached_etree_parse, fname, expect): + app.build() + check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect) + + +@pytest.mark.sphinx('html', testroot='toctree-glob', + confoverrides={'html_baseurl': 'https://example.com/'}) +def test_html_baseurl(app, status, warning): + app.build() + + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<link rel="canonical" href="https://example.com/index.html" />' in result + + result = (app.outdir / 'qux' / 'index.html').read_text(encoding='utf8') + assert '<link rel="canonical" href="https://example.com/qux/index.html" />' in result + + +@pytest.mark.sphinx('html', testroot='toctree-glob', + confoverrides={'html_baseurl': 'https://example.com/subdir', + 'html_file_suffix': '.htm'}) +def test_html_baseurl_and_html_file_suffix(app, status, warning): + app.build() + + result = (app.outdir / 'index.htm').read_text(encoding='utf8') + assert '<link rel="canonical" href="https://example.com/subdir/index.htm" />' in result + + result = (app.outdir / 'qux' / 'index.htm').read_text(encoding='utf8') + assert '<link rel="canonical" href="https://example.com/subdir/qux/index.htm" />' in result + + +@pytest.mark.sphinx('html', testroot='basic') +def test_default_html_math_renderer(app, status, warning): + assert app.builder.math_renderer_name == 'mathjax' + + +@pytest.mark.sphinx('html', testroot='basic', + confoverrides={'extensions': ['sphinx.ext.mathjax']}) +def test_html_math_renderer_is_mathjax(app, status, warning): + assert 
app.builder.math_renderer_name == 'mathjax' + + +@pytest.mark.sphinx('html', testroot='basic', + confoverrides={'extensions': ['sphinx.ext.imgmath']}) +def test_html_math_renderer_is_imgmath(app, status, warning): + assert app.builder.math_renderer_name == 'imgmath' + + +@pytest.mark.sphinx('html', testroot='basic', + confoverrides={'extensions': ['sphinxcontrib.jsmath', + 'sphinx.ext.imgmath']}) +def test_html_math_renderer_is_duplicated(make_app, app_params): + args, kwargs = app_params + with pytest.raises( + ConfigError, + match='Many math_renderers are registered. But no math_renderer is selected.', + ): + make_app(*args, **kwargs) + + +@pytest.mark.sphinx('html', testroot='basic', + confoverrides={'extensions': ['sphinx.ext.imgmath', + 'sphinx.ext.mathjax']}) +def test_html_math_renderer_is_duplicated2(app, status, warning): + # case of both mathjax and another math_renderer is loaded + assert app.builder.math_renderer_name == 'imgmath' # The another one is chosen + + +@pytest.mark.sphinx('html', testroot='basic', + confoverrides={'extensions': ['sphinxcontrib.jsmath', + 'sphinx.ext.imgmath'], + 'html_math_renderer': 'imgmath'}) +def test_html_math_renderer_is_chosen(app, status, warning): + assert app.builder.math_renderer_name == 'imgmath' + + +@pytest.mark.sphinx('html', testroot='basic', + confoverrides={'extensions': ['sphinxcontrib.jsmath', + 'sphinx.ext.mathjax'], + 'html_math_renderer': 'imgmath'}) +def test_html_math_renderer_is_mismatched(make_app, app_params): + args, kwargs = app_params + with pytest.raises(ConfigError, match="Unknown math_renderer 'imgmath' is given."): + make_app(*args, **kwargs) + + +@pytest.mark.sphinx('html', testroot='basic') +def test_html_pygments_style_default(app): + style = app.builder.highlighter.formatter_args.get('style') + assert style.__name__ == 'Alabaster' + + +@pytest.mark.sphinx('html', testroot='basic', + confoverrides={'pygments_style': 'sphinx'}) +def test_html_pygments_style_manually(app): + style = app.builder.highlighter.formatter_args.get('style') + assert style.__name__ == 'SphinxStyle' + + +@pytest.mark.sphinx('html', testroot='basic', + confoverrides={'html_theme': 'classic'}) +def test_html_pygments_for_classic_theme(app): + style = app.builder.highlighter.formatter_args.get('style') + assert style.__name__ == 'SphinxStyle' + + +@pytest.mark.sphinx('html', testroot='basic') +def test_html_dark_pygments_style_default(app): + assert app.builder.dark_highlighter is None + + +@pytest.mark.sphinx(testroot='basic', srcdir='validate_html_extra_path') +def test_validate_html_extra_path(app): + (app.confdir / '_static').mkdir(parents=True, exist_ok=True) + app.config.html_extra_path = [ + '/path/to/not_found', # not found + '_static', + app.outdir, # outdir + app.outdir / '_static', # inside outdir + ] + validate_html_extra_path(app, app.config) + assert app.config.html_extra_path == ['_static'] + + +@pytest.mark.sphinx(testroot='basic', srcdir='validate_html_static_path') +def test_validate_html_static_path(app): + (app.confdir / '_static').mkdir(parents=True, exist_ok=True) + app.config.html_static_path = [ + '/path/to/not_found', # not found + '_static', + app.outdir, # outdir + app.outdir / '_static', # inside outdir + ] + validate_html_static_path(app, app.config) + assert app.config.html_static_path == ['_static'] + + +@pytest.mark.sphinx(testroot='html_scaled_image_link') +def test_html_scaled_image_link(app): + app.build() + context = (app.outdir / 'index.html').read_text(encoding='utf8') + + # no scaled parameters + assert 
re.search('\n<img alt="_images/img.png" src="_images/img.png" />', context) + + # scaled_image_link + assert re.search('\n<a class="reference internal image-reference" href="_images/img.png">' + '<img alt="_images/img.png" src="_images/img.png" style="[^"]+" /></a>', + context) + + # no-scaled-link class disables the feature + assert re.search('\n<img alt="_images/img.png" class="no-scaled-link"' + ' src="_images/img.png" style="[^"]+" />', + context) + + +@pytest.mark.sphinx('html', testroot='reST-code-block', + confoverrides={'html_codeblock_linenos_style': 'table'}) +def test_html_codeblock_linenos_style_table(app): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + + assert ('<div class="linenodiv"><pre><span class="normal">1</span>\n' + '<span class="normal">2</span>\n' + '<span class="normal">3</span>\n' + '<span class="normal">4</span></pre></div>') in content + + +@pytest.mark.sphinx('html', testroot='reST-code-block', + confoverrides={'html_codeblock_linenos_style': 'inline'}) +def test_html_codeblock_linenos_style_inline(app): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + + assert '<span class="linenos">1</span>' in content + + +@pytest.mark.sphinx('html', testroot='highlight_options') +def test_highlight_options(app): + subject = app.builder.highlighter + with patch.object(subject, 'highlight_block', wraps=subject.highlight_block) as highlight: + app.build() + + call_args = highlight.call_args_list + assert len(call_args) == 3 + assert call_args[0] == call(ANY, 'default', force=False, linenos=False, + location=ANY, opts={'default_option': True}) + assert call_args[1] == call(ANY, 'python', force=False, linenos=False, + location=ANY, opts={'python_option': True}) + assert call_args[2] == call(ANY, 'java', force=False, linenos=False, + location=ANY, opts={}) + + +@pytest.mark.sphinx('html', testroot='highlight_options', + confoverrides={'highlight_options': {'default_option': True}}) +def test_highlight_options_old(app): + subject = app.builder.highlighter + with patch.object(subject, 'highlight_block', wraps=subject.highlight_block) as highlight: + app.build() + + call_args = highlight.call_args_list + assert len(call_args) == 3 + assert call_args[0] == call(ANY, 'default', force=False, linenos=False, + location=ANY, opts={'default_option': True}) + assert call_args[1] == call(ANY, 'python', force=False, linenos=False, + location=ANY, opts={}) + assert call_args[2] == call(ANY, 'java', force=False, linenos=False, + location=ANY, opts={}) + + +@pytest.mark.sphinx('html', testroot='basic', + confoverrides={'html_permalinks': False}) +def test_html_permalink_disable(app): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + + assert '<h1>The basic Sphinx documentation for testing</h1>' in content + + +@pytest.mark.sphinx('html', testroot='basic', + confoverrides={'html_permalinks_icon': '<span>[PERMALINK]</span>'}) +def test_html_permalink_icon(app): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + + assert ('<h1>The basic Sphinx documentation for testing<a class="headerlink" ' + 'href="#the-basic-sphinx-documentation-for-testing" ' + 'title="Link to this heading"><span>[PERMALINK]</span></a></h1>' in content) + + +@pytest.mark.sphinx('html', testroot='html_signaturereturn_icon') +def test_html_signaturereturn_icon(app): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + + assert ('<span class="sig-return-icon">→</span>' 
in content) + + +@pytest.mark.sphinx('html', testroot='reST-code-role') +def test_html_code_role(app): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + + common_content = ( + '<span class="k">def</span> <span class="nf">foo</span>' + '<span class="p">(</span>' + '<span class="mi">1</span> ' + '<span class="o">+</span> ' + '<span class="mi">2</span> ' + '<span class="o">+</span> ' + '<span class="kc">None</span> ' + '<span class="o">+</span> ' + '<span class="s2">"abc"</span>' + '<span class="p">):</span> ' + '<span class="k">pass</span>') + assert ('<p>Inline <code class="code highlight python docutils literal highlight-python">' + + common_content + '</code> code block</p>') in content + assert ('<div class="highlight-python notranslate">' + + '<div class="highlight"><pre><span></span>' + + common_content) in content + + +@pytest.mark.sphinx('html', testroot='root', + confoverrides={'option_emphasise_placeholders': True}) +def test_option_emphasise_placeholders(app, status, warning): + app.build() + content = (app.outdir / 'objects.html').read_text(encoding='utf8') + assert '<em><span class="pre">TYPE</span></em>' in content + assert '{TYPE}' not in content + assert ('<em><span class="pre">WHERE</span></em>' + '<span class="pre">-</span>' + '<em><span class="pre">COUNT</span></em>' in content) + assert '<span class="pre">{{value}}</span>' in content + assert ('<span class="pre">--plugin.option</span></span>' + '<a class="headerlink" href="#cmdoption-perl-plugin.option" title="Link to this definition">¶</a></dt>') in content + + +@pytest.mark.sphinx('html', testroot='root') +def test_option_emphasise_placeholders_default(app, status, warning): + app.build() + content = (app.outdir / 'objects.html').read_text(encoding='utf8') + assert '<span class="pre">={TYPE}</span>' in content + assert '<span class="pre">={WHERE}-{COUNT}</span></span>' in content + assert '<span class="pre">{client_name}</span>' in content + assert ('<span class="pre">--plugin.option</span></span>' + '<span class="sig-prename descclassname"></span>' + '<a class="headerlink" href="#cmdoption-perl-plugin.option" title="Link to this definition">¶</a></dt>') in content + + +@pytest.mark.sphinx('html', testroot='root') +def test_option_reference_with_value(app, status, warning): + app.build() + content = (app.outdir / 'objects.html').read_text(encoding='utf-8') + assert ('<span class="pre">-mapi</span></span><span class="sig-prename descclassname">' + '</span><a class="headerlink" href="#cmdoption-git-commit-mapi"') in content + assert 'first option <a class="reference internal" href="#cmdoption-git-commit-mapi">' in content + assert ('<a class="reference internal" href="#cmdoption-git-commit-mapi">' + '<code class="xref std std-option docutils literal notranslate"><span class="pre">-mapi[=xxx]</span></code></a>') in content + assert '<span class="pre">-mapi</span> <span class="pre">with_space</span>' in content + + +@pytest.mark.sphinx('html', testroot='theming') +def test_theme_options(app, status, warning): + app.build() + + result = (app.outdir / '_static' / 'documentation_options.js').read_text(encoding='utf8') + assert 'NAVIGATION_WITH_KEYS: false' in result + assert 'ENABLE_SEARCH_SHORTCUTS: true' in result + + +@pytest.mark.sphinx('html', testroot='theming', + confoverrides={'html_theme_options.navigation_with_keys': True, + 'html_theme_options.enable_search_shortcuts': False}) +def test_theme_options_with_override(app, status, warning): + app.build() + + result = (app.outdir / 
'_static' / 'documentation_options.js').read_text(encoding='utf8') + assert 'NAVIGATION_WITH_KEYS: true' in result + assert 'ENABLE_SEARCH_SHORTCUTS: false' in result + + +@pytest.mark.sphinx('html', testroot='build-html-theme-having-multiple-stylesheets') +def test_theme_having_multiple_stylesheets(app): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf-8') + + assert '<link rel="stylesheet" type="text/css" href="_static/mytheme.css" />' in content + assert '<link rel="stylesheet" type="text/css" href="_static/extra.css" />' in content + + +@pytest.mark.sphinx('html', testroot='images') +def test_copy_images(app, status, warning): + app.build() + + images_dir = Path(app.outdir) / '_images' + images = {image.name for image in images_dir.rglob('*')} + assert images == { + 'img.png', + 'rimg.png', + 'rimg1.png', + 'svgimg.svg', + 'testimäge.png', + } diff --git a/tests/test_build_latex.py b/tests/test_build_latex.py new file mode 100644 index 0000000..e37a97e --- /dev/null +++ b/tests/test_build_latex.py @@ -0,0 +1,1755 @@ +"""Test the build process with LaTeX builder with the test root.""" + +import os +import re +import subprocess +from itertools import chain, product +from pathlib import Path +from shutil import copyfile +from subprocess import CalledProcessError + +import pytest + +from sphinx.builders.latex import default_latex_documents +from sphinx.config import Config +from sphinx.errors import SphinxError +from sphinx.ext.intersphinx import load_mappings, normalize_intersphinx_mapping +from sphinx.ext.intersphinx import setup as intersphinx_setup +from sphinx.testing.util import strip_escseq +from sphinx.util.osutil import ensuredir +from sphinx.writers.latex import LaTeXTranslator + +from .test_build_html import ENV_WARNINGS + +try: + from contextlib import chdir +except ImportError: + from sphinx.util.osutil import _chdir as chdir + +LATEX_ENGINES = ['pdflatex', 'lualatex', 'xelatex'] +DOCCLASSES = ['manual', 'howto'] +STYLEFILES = ['article.cls', 'fancyhdr.sty', 'titlesec.sty', 'amsmath.sty', + 'framed.sty', 'color.sty', 'fancyvrb.sty', + 'fncychap.sty', 'geometry.sty', 'kvoptions.sty', 'hyperref.sty', + 'booktabs.sty'] + +LATEX_WARNINGS = ENV_WARNINGS + """\ +%(root)s/index.rst:\\d+: WARNING: unknown option: '&option' +%(root)s/index.rst:\\d+: WARNING: citation not found: missing +%(root)s/index.rst:\\d+: WARNING: a suitable image for latex builder not found: foo.\\* +%(root)s/index.rst:\\d+: WARNING: Lexing literal_block ".*" as "c" resulted in an error at token: ".*". Retrying in relaxed mode. 
+""" + + +# only run latex if all needed packages are there +def kpsetest(*filenames): + try: + subprocess.run(['kpsewhich'] + list(filenames), capture_output=True, check=True) + return True + except (OSError, CalledProcessError): + return False # command not found or exit with non-zero + + +# compile latex document with app.config.latex_engine +def compile_latex_document(app, filename='python.tex', docclass='manual'): + # now, try to run latex over it + try: + with chdir(app.outdir): + # name latex output-directory according to both engine and docclass + # to avoid reuse of auxiliary files by one docclass from another + latex_outputdir = app.config.latex_engine + docclass + ensuredir(latex_outputdir) + # keep a copy of latex file for this engine in case test fails + copyfile(filename, latex_outputdir + '/' + filename) + args = [app.config.latex_engine, + '--halt-on-error', + '--interaction=nonstopmode', + '-output-directory=%s' % latex_outputdir, + filename] + subprocess.run(args, capture_output=True, check=True) + except OSError as exc: # most likely the latex executable was not found + raise pytest.skip.Exception from exc + except CalledProcessError as exc: + print(exc.stdout.decode('utf8')) + print(exc.stderr.decode('utf8')) + msg = f'{app.config.latex_engine} exited with return code {exc.returncode}' + raise AssertionError(msg) from exc + + +def skip_if_requested(testfunc): + if 'SKIP_LATEX_BUILD' in os.environ: + msg = 'Skip LaTeX builds because SKIP_LATEX_BUILD is set' + return pytest.mark.skipif(True, reason=msg)(testfunc) + else: + return testfunc + + +def skip_if_stylefiles_notfound(testfunc): + if kpsetest(*STYLEFILES) is False: + msg = 'not running latex, the required styles do not seem to be installed' + return pytest.mark.skipif(True, reason=msg)(testfunc) + else: + return testfunc + + +@skip_if_requested +@skip_if_stylefiles_notfound +@pytest.mark.parametrize( + ('engine', 'docclass', 'python_maximum_signature_line_length'), + # Only running test with `python_maximum_signature_line_length` not None with last + # LaTeX engine to reduce testing time, as if this configuration does not fail with + # one engine, it's almost impossible it would fail with another. 
+ chain( + product(LATEX_ENGINES[:-1], DOCCLASSES, [None]), + product([LATEX_ENGINES[-1]], DOCCLASSES, [1]), + ), +) +@pytest.mark.sphinx('latex', freshenv=True) +def test_build_latex_doc(app, status, warning, engine, docclass, python_maximum_signature_line_length): + app.config.python_maximum_signature_line_length = python_maximum_signature_line_length + app.config.intersphinx_mapping = { + 'sphinx': ('https://www.sphinx-doc.org/en/master/', None), + } + intersphinx_setup(app) + app.config.latex_engine = engine + app.config.latex_documents = [app.config.latex_documents[0][:4] + (docclass,)] + if engine == 'xelatex': + app.config.latex_table_style = ['booktabs'] + elif engine == 'lualatex': + app.config.latex_table_style = ['colorrows'] + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + app.builder.init() + LaTeXTranslator.ignore_missing_images = True + app.builder.build_all() + + # file from latex_additional_files + assert (app.outdir / 'svgimg.svg').is_file() + + compile_latex_document(app, 'sphinxtests.tex', docclass) + + +@pytest.mark.sphinx('latex') +def test_writer(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'sphinxtests.tex').read_text(encoding='utf8') + + assert ('\\begin{sphinxfigure-in-table}\n\\centering\n\\capstart\n' + '\\noindent\\sphinxincludegraphics{{img}.png}\n' + '\\sphinxfigcaption{figure in table}\\label{\\detokenize{markup:id8}}' + '\\end{sphinxfigure-in-table}\\relax' in result) + + assert ('\\begin{wrapfigure}{r}{0pt}\n\\centering\n' + '\\noindent\\sphinxincludegraphics{{rimg}.png}\n' + '\\caption{figure with align option}\\label{\\detokenize{markup:id9}}' + '\\end{wrapfigure}\n\n' + '\\mbox{}\\par\\vskip-\\dimexpr\\baselineskip+\\parskip\\relax' in result) + + assert ('\\begin{wrapfigure}{r}{0.500\\linewidth}\n\\centering\n' + '\\noindent\\sphinxincludegraphics{{rimg}.png}\n' + '\\caption{figure with align \\& figwidth option}' + '\\label{\\detokenize{markup:id10}}' + '\\end{wrapfigure}\n\n' + '\\mbox{}\\par\\vskip-\\dimexpr\\baselineskip+\\parskip\\relax' in result) + + assert ('\\begin{wrapfigure}{r}{3cm}\n\\centering\n' + '\\noindent\\sphinxincludegraphics[width=3cm]{{rimg}.png}\n' + '\\caption{figure with align \\& width option}' + '\\label{\\detokenize{markup:id11}}' + '\\end{wrapfigure}\n\n' + '\\mbox{}\\par\\vskip-\\dimexpr\\baselineskip+\\parskip\\relax' in result) + + assert 'Footnotes' not in result + + assert ('\\begin{sphinxseealso}{See also:}\n\n' + '\\sphinxAtStartPar\n' + 'something, something else, something more\n' + '\\begin{description}\n' + '\\sphinxlineitem{\\sphinxhref{http://www.google.com}{Google}}\n' + '\\sphinxAtStartPar\n' + 'For everything.\n' + '\n' + '\\end{description}\n' + '\n\n\\end{sphinxseealso}\n\n' in result) + + +@pytest.mark.sphinx('latex', testroot='warnings', freshenv=True) +def test_latex_warnings(app, status, warning): + app.builder.build_all() + + warnings = strip_escseq(re.sub(re.escape(os.sep) + '{1,2}', '/', warning.getvalue())) + warnings_exp = LATEX_WARNINGS % { + 'root': re.escape(app.srcdir.as_posix())} + assert re.match(warnings_exp + '$', warnings), \ + "Warnings don't match:\n" + \ + '--- Expected (regex):\n' + warnings_exp + \ + '--- Got:\n' + warnings + + +@pytest.mark.sphinx('latex', testroot='basic') +def test_latex_basic(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'test.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert r'\title{The basic Sphinx documentation for 
testing}' in result + assert r'\release{}' in result + assert r'\renewcommand{\releasename}{}' in result + + +@pytest.mark.sphinx('latex', testroot='basic', + confoverrides={ + 'latex_documents': [('index', 'test.tex', 'title', 'author', 'manual')], + }) +def test_latex_basic_manual(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'test.tex').read_text(encoding='utf8') + print(result) + assert r'\def\sphinxdocclass{report}' in result + assert r'\documentclass[letterpaper,10pt,english]{sphinxmanual}' in result + + +@pytest.mark.sphinx('latex', testroot='basic', + confoverrides={ + 'latex_documents': [('index', 'test.tex', 'title', 'author', 'howto')], + }) +def test_latex_basic_howto(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'test.tex').read_text(encoding='utf8') + print(result) + assert r'\def\sphinxdocclass{article}' in result + assert r'\documentclass[letterpaper,10pt,english]{sphinxhowto}' in result + + +@pytest.mark.sphinx('latex', testroot='basic', + confoverrides={ + 'language': 'ja', + 'latex_documents': [('index', 'test.tex', 'title', 'author', 'manual')], + }) +def test_latex_basic_manual_ja(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'test.tex').read_text(encoding='utf8') + print(result) + assert r'\def\sphinxdocclass{ujbook}' in result + assert r'\documentclass[letterpaper,10pt,dvipdfmx]{sphinxmanual}' in result + + +@pytest.mark.sphinx('latex', testroot='basic', + confoverrides={ + 'language': 'ja', + 'latex_documents': [('index', 'test.tex', 'title', 'author', 'howto')], + }) +def test_latex_basic_howto_ja(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'test.tex').read_text(encoding='utf8') + print(result) + assert r'\def\sphinxdocclass{ujreport}' in result + assert r'\documentclass[letterpaper,10pt,dvipdfmx]{sphinxhowto}' in result + + +@pytest.mark.sphinx('latex', testroot='latex-theme') +def test_latex_theme(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + assert r'\def\sphinxdocclass{book}' in result + assert r'\documentclass[a4paper,12pt,english]{sphinxbook}' in result + + +@pytest.mark.sphinx('latex', testroot='latex-theme', + confoverrides={'latex_elements': {'papersize': 'b5paper', + 'pointsize': '9pt'}}) +def test_latex_theme_papersize(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + assert r'\def\sphinxdocclass{book}' in result + assert r'\documentclass[b5paper,9pt,english]{sphinxbook}' in result + + +@pytest.mark.sphinx('latex', testroot='latex-theme', + confoverrides={'latex_theme_options': {'papersize': 'b5paper', + 'pointsize': '9pt'}}) +def test_latex_theme_options(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + assert r'\def\sphinxdocclass{book}' in result + assert r'\documentclass[b5paper,9pt,english]{sphinxbook}' in result + + +@pytest.mark.sphinx('latex', testroot='basic', confoverrides={'language': 'zh'}) +def test_latex_additional_settings_for_language_code(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'test.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert r'\usepackage{xeCJK}' in result + + +@pytest.mark.sphinx('latex', testroot='basic', confoverrides={'language': 'el'}) +def 
test_latex_additional_settings_for_greek(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'test.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\usepackage{polyglossia}\n\\setmainlanguage{greek}' in result + assert '\\newfontfamily\\greekfonttt{FreeMono}' in result + + +@pytest.mark.sphinx('latex', testroot='latex-title') +def test_latex_title_after_admonitions(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'test.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\title{test\\sphinxhyphen{}latex\\sphinxhyphen{}title}' in result + + +@pytest.mark.sphinx('latex', testroot='basic', + confoverrides={'release': '1.0_0'}) +def test_latex_release(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'test.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert r'\release{1.0\_0}' in result + assert r'\renewcommand{\releasename}{Release}' in result + + +@pytest.mark.sphinx('latex', testroot='numfig', + confoverrides={'numfig': True}) +def test_numref(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert ('\\hyperref[\\detokenize{index:fig1}]' + '{Fig.\\@ \\ref{\\detokenize{index:fig1}}}') in result + assert ('\\hyperref[\\detokenize{baz:fig22}]' + '{Figure\\ref{\\detokenize{baz:fig22}}}') in result + assert ('\\hyperref[\\detokenize{index:table-1}]' + '{Table \\ref{\\detokenize{index:table-1}}}') in result + assert ('\\hyperref[\\detokenize{baz:table22}]' + '{Table:\\ref{\\detokenize{baz:table22}}}') in result + assert ('\\hyperref[\\detokenize{index:code-1}]' + '{Listing \\ref{\\detokenize{index:code-1}}}') in result + assert ('\\hyperref[\\detokenize{baz:code22}]' + '{Code\\sphinxhyphen{}\\ref{\\detokenize{baz:code22}}}') in result + assert ('\\hyperref[\\detokenize{foo:foo}]' + '{Section \\ref{\\detokenize{foo:foo}}}') in result + assert ('\\hyperref[\\detokenize{bar:bar-a}]' + '{Section \\ref{\\detokenize{bar:bar-a}}}') in result + assert ('\\hyperref[\\detokenize{index:fig1}]{Fig.\\ref{\\detokenize{index:fig1}} ' + '\\nameref{\\detokenize{index:fig1}}}') in result + assert ('\\hyperref[\\detokenize{foo:foo}]{Sect.\\ref{\\detokenize{foo:foo}} ' + '\\nameref{\\detokenize{foo:foo}}}') in result + + # sphinxmessages.sty + result = (app.outdir / 'sphinxmessages.sty').read_text(encoding='utf8') + print(result) + assert r'\addto\captionsenglish{\renewcommand{\figurename}{Fig.\@{} }}' in result + assert r'\addto\captionsenglish{\renewcommand{\tablename}{Table }}' in result + assert r'\addto\captionsenglish{\renewcommand{\literalblockname}{Listing}}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='numfig', + confoverrides={'numfig': True, + 'numfig_format': {'figure': 'Figure:%s', + 'table': 'Tab_%s', + 'code-block': 'Code-%s', + 'section': 'SECTION-%s'}}) +def test_numref_with_prefix1(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\ref{\\detokenize{index:fig1}}' in result + assert '\\ref{\\detokenize{baz:fig22}}' in result + assert '\\ref{\\detokenize{index:table-1}}' in result + assert '\\ref{\\detokenize{baz:table22}}' in result + assert 
'\\ref{\\detokenize{index:code-1}}' in result + assert '\\ref{\\detokenize{baz:code22}}' in result + assert ('\\hyperref[\\detokenize{index:fig1}]' + '{Figure:\\ref{\\detokenize{index:fig1}}}') in result + assert ('\\hyperref[\\detokenize{baz:fig22}]' + '{Figure\\ref{\\detokenize{baz:fig22}}}') in result + assert ('\\hyperref[\\detokenize{index:table-1}]' + '{Tab\\_\\ref{\\detokenize{index:table-1}}}') in result + assert ('\\hyperref[\\detokenize{baz:table22}]' + '{Table:\\ref{\\detokenize{baz:table22}}}') in result + assert ('\\hyperref[\\detokenize{index:code-1}]' + '{Code\\sphinxhyphen{}\\ref{\\detokenize{index:code-1}}}') in result + assert ('\\hyperref[\\detokenize{baz:code22}]' + '{Code\\sphinxhyphen{}\\ref{\\detokenize{baz:code22}}}') in result + assert ('\\hyperref[\\detokenize{foo:foo}]' + '{SECTION\\sphinxhyphen{}\\ref{\\detokenize{foo:foo}}}') in result + assert ('\\hyperref[\\detokenize{bar:bar-a}]' + '{SECTION\\sphinxhyphen{}\\ref{\\detokenize{bar:bar-a}}}') in result + assert ('\\hyperref[\\detokenize{index:fig1}]{Fig.\\ref{\\detokenize{index:fig1}} ' + '\\nameref{\\detokenize{index:fig1}}}') in result + assert ('\\hyperref[\\detokenize{foo:foo}]{Sect.\\ref{\\detokenize{foo:foo}} ' + '\\nameref{\\detokenize{foo:foo}}}') in result + + # sphinxmessages.sty + result = (app.outdir / 'sphinxmessages.sty').read_text(encoding='utf8') + print(result) + assert r'\addto\captionsenglish{\renewcommand{\figurename}{Figure:}}' in result + assert r'\addto\captionsenglish{\renewcommand{\tablename}{Tab\_}}' in result + assert r'\addto\captionsenglish{\renewcommand{\literalblockname}{Code-}}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='numfig', + confoverrides={'numfig': True, + 'numfig_format': {'figure': 'Figure:%s.', + 'table': 'Tab_%s:', + 'code-block': 'Code-%s | ', + 'section': 'SECTION_%s_'}}) +def test_numref_with_prefix2(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert ('\\hyperref[\\detokenize{index:fig1}]' + '{Figure:\\ref{\\detokenize{index:fig1}}.\\@}') in result + assert ('\\hyperref[\\detokenize{baz:fig22}]' + '{Figure\\ref{\\detokenize{baz:fig22}}}') in result + assert ('\\hyperref[\\detokenize{index:table-1}]' + '{Tab\\_\\ref{\\detokenize{index:table-1}}:}') in result + assert ('\\hyperref[\\detokenize{baz:table22}]' + '{Table:\\ref{\\detokenize{baz:table22}}}') in result + assert ('\\hyperref[\\detokenize{index:code-1}]{Code\\sphinxhyphen{}\\ref{\\detokenize{index:code-1}} ' + '| }') in result + assert ('\\hyperref[\\detokenize{baz:code22}]' + '{Code\\sphinxhyphen{}\\ref{\\detokenize{baz:code22}}}') in result + assert ('\\hyperref[\\detokenize{foo:foo}]' + '{SECTION\\_\\ref{\\detokenize{foo:foo}}\\_}') in result + assert ('\\hyperref[\\detokenize{bar:bar-a}]' + '{SECTION\\_\\ref{\\detokenize{bar:bar-a}}\\_}') in result + assert ('\\hyperref[\\detokenize{index:fig1}]{Fig.\\ref{\\detokenize{index:fig1}} ' + '\\nameref{\\detokenize{index:fig1}}}') in result + assert ('\\hyperref[\\detokenize{foo:foo}]{Sect.\\ref{\\detokenize{foo:foo}} ' + '\\nameref{\\detokenize{foo:foo}}}') in result + + # sphinxmessages.sty + result = (app.outdir / 'sphinxmessages.sty').read_text(encoding='utf8') + print(result) + assert r'\addto\captionsenglish{\renewcommand{\figurename}{Figure:}}' in result + assert r'\def\fnum@figure{\figurename\thefigure{}.}' in result + assert r'\addto\captionsenglish{\renewcommand{\tablename}{Tab\_}}' in result + 
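# The sphinxmessages.sty assertions in these numfig tests follow from how
# Sphinx splits each numfig_format entry at the '%s' placeholder: the text
# before it becomes the LaTeX caption name (\figurename, \tablename,
# \literalblockname) and the text after it is appended in \fnum@figure /
# \fnum@table. A minimal conf.py sketch with the overrides used just above:
numfig = True
numfig_format = {
    'figure': 'Figure:%s.',   # prefix 'Figure:' -> \figurename, suffix '.' -> \fnum@figure
    'table': 'Tab_%s:',       # prefix 'Tab_' -> \tablename, suffix ':' -> \fnum@table
    'code-block': 'Code-%s | ',
    'section': 'SECTION_%s_',
}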
assert r'\def\fnum@table{\tablename\thetable{}:}' in result + assert r'\addto\captionsenglish{\renewcommand{\literalblockname}{Code-}}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='numfig', + confoverrides={'numfig': True, 'language': 'ja'}) +def test_numref_with_language_ja(app, status, warning): + app.build() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert ('\\hyperref[\\detokenize{index:fig1}]' + '{\u56f3 \\ref{\\detokenize{index:fig1}}}') in result + assert ('\\hyperref[\\detokenize{baz:fig22}]' + '{Figure\\ref{\\detokenize{baz:fig22}}}') in result + assert ('\\hyperref[\\detokenize{index:table-1}]' + '{\u8868 \\ref{\\detokenize{index:table-1}}}') in result + assert ('\\hyperref[\\detokenize{baz:table22}]' + '{Table:\\ref{\\detokenize{baz:table22}}}') in result + assert ('\\hyperref[\\detokenize{index:code-1}]' + '{\u30ea\u30b9\u30c8 \\ref{\\detokenize{index:code-1}}}') in result + assert ('\\hyperref[\\detokenize{baz:code22}]' + '{Code\\sphinxhyphen{}\\ref{\\detokenize{baz:code22}}}') in result + assert ('\\hyperref[\\detokenize{foo:foo}]' + '{\\ref{\\detokenize{foo:foo}} \u7ae0}') in result + assert ('\\hyperref[\\detokenize{bar:bar-a}]' + '{\\ref{\\detokenize{bar:bar-a}} \u7ae0}') in result + assert ('\\hyperref[\\detokenize{index:fig1}]{Fig.\\ref{\\detokenize{index:fig1}} ' + '\\nameref{\\detokenize{index:fig1}}}') in result + assert ('\\hyperref[\\detokenize{foo:foo}]{Sect.\\ref{\\detokenize{foo:foo}} ' + '\\nameref{\\detokenize{foo:foo}}}') in result + + # sphinxmessages.sty + result = (app.outdir / 'sphinxmessages.sty').read_text(encoding='utf8') + print(result) + assert '\\@iden{\\renewcommand{\\figurename}{図 }}' in result + assert '\\@iden{\\renewcommand{\\tablename}{表 }}' in result + assert '\\@iden{\\renewcommand{\\literalblockname}{リスト}}' in result + + +@pytest.mark.sphinx('latex', testroot='latex-numfig') +def test_latex_obey_numfig_is_false(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'SphinxManual.tex').read_text(encoding='utf8') + assert '\\usepackage{sphinx}' in result + + result = (app.outdir / 'SphinxHowTo.tex').read_text(encoding='utf8') + assert '\\usepackage{sphinx}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='latex-numfig', + confoverrides={'numfig': True, 'numfig_secnum_depth': 0}) +def test_latex_obey_numfig_secnum_depth_is_zero(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'SphinxManual.tex').read_text(encoding='utf8') + assert '\\usepackage[,nonumfigreset,mathnumfig]{sphinx}' in result + + result = (app.outdir / 'SphinxHowTo.tex').read_text(encoding='utf8') + assert '\\usepackage[,nonumfigreset,mathnumfig]{sphinx}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='latex-numfig', + confoverrides={'numfig': True, 'numfig_secnum_depth': 2}) +def test_latex_obey_numfig_secnum_depth_is_two(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'SphinxManual.tex').read_text(encoding='utf8') + assert '\\usepackage[,numfigreset=2,mathnumfig]{sphinx}' in result + + result = (app.outdir / 'SphinxHowTo.tex').read_text(encoding='utf8') + assert '\\usepackage[,numfigreset=3,mathnumfig]{sphinx}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='latex-numfig', + confoverrides={'numfig': True, 'math_numfig': False}) +def test_latex_obey_numfig_but_math_numfig_false(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 
'SphinxManual.tex').read_text(encoding='utf8') + assert '\\usepackage[,numfigreset=1]{sphinx}' in result + + result = (app.outdir / 'SphinxHowTo.tex').read_text(encoding='utf8') + assert '\\usepackage[,numfigreset=2]{sphinx}' in result + + +@pytest.mark.sphinx('latex', testroot='basic') +def test_latex_add_latex_package(app, status, warning): + app.add_latex_package('foo') + app.add_latex_package('bar', 'baz') + app.builder.build_all() + result = (app.outdir / 'test.tex').read_text(encoding='utf8') + assert '\\usepackage{foo}' in result + assert '\\usepackage[baz]{bar}' in result + + +@pytest.mark.sphinx('latex', testroot='latex-babel') +def test_babel_with_no_language_settings(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\documentclass[letterpaper,10pt,english]{sphinxmanual}' in result + assert '\\usepackage{babel}' in result + assert '\\usepackage{tgtermes}' in result + assert '\\usepackage[Bjarne]{fncychap}' in result + assert ('\\addto\\captionsenglish{\\renewcommand{\\contentsname}{Table of content}}\n' + in result) + assert '\\shorthandoff{"}' in result + + # sphinxmessages.sty + result = (app.outdir / 'sphinxmessages.sty').read_text(encoding='utf8') + print(result) + assert r'\def\pageautorefname{page}' in result + assert r'\addto\captionsenglish{\renewcommand{\figurename}{Fig.\@{} }}' in result + assert r'\addto\captionsenglish{\renewcommand{\tablename}{Table.\@{} }}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='latex-babel', + confoverrides={'language': 'de'}) +def test_babel_with_language_de(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\documentclass[letterpaper,10pt,ngerman]{sphinxmanual}' in result + assert '\\usepackage{babel}' in result + assert '\\usepackage{tgtermes}' in result + assert '\\usepackage[Sonny]{fncychap}' in result + assert ('\\addto\\captionsngerman{\\renewcommand{\\contentsname}{Table of content}}\n' + in result) + assert '\\shorthandoff{"}' in result + + # sphinxmessages.sty + result = (app.outdir / 'sphinxmessages.sty').read_text(encoding='utf8') + print(result) + assert r'\def\pageautorefname{Seite}' in result + assert r'\addto\captionsngerman{\renewcommand{\figurename}{Fig.\@{} }}' in result + assert r'\addto\captionsngerman{\renewcommand{\tablename}{Table.\@{} }}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='latex-babel', + confoverrides={'language': 'ru'}) +def test_babel_with_language_ru(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\documentclass[letterpaper,10pt,russian]{sphinxmanual}' in result + assert '\\usepackage{babel}' in result + assert '\\usepackage{tgtermes}' not in result + assert '\\usepackage[Sonny]{fncychap}' in result + assert ('\\addto\\captionsrussian{\\renewcommand{\\contentsname}{Table of content}}\n' + in result) + assert '\\shorthandoff{"}' in result + + # sphinxmessages.sty + result = (app.outdir / 'sphinxmessages.sty').read_text(encoding='utf8') + print(result) + assert r'\def\pageautorefname{страница}' in result + assert r'\addto\captionsrussian{\renewcommand{\figurename}{Fig.\@{} }}' in result + assert 
r'\addto\captionsrussian{\renewcommand{\tablename}{Table.\@{} }}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='latex-babel', + confoverrides={'language': 'tr'}) +def test_babel_with_language_tr(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\documentclass[letterpaper,10pt,turkish]{sphinxmanual}' in result + assert '\\usepackage{babel}' in result + assert '\\usepackage{tgtermes}' in result + assert '\\usepackage[Sonny]{fncychap}' in result + assert ('\\addto\\captionsturkish{\\renewcommand{\\contentsname}{Table of content}}\n' + in result) + assert '\\shorthandoff{=}' in result + + # sphinxmessages.sty + result = (app.outdir / 'sphinxmessages.sty').read_text(encoding='utf8') + print(result) + assert r'\def\pageautorefname{sayfa}' in result + assert r'\addto\captionsturkish{\renewcommand{\figurename}{Fig.\@{} }}' in result + assert r'\addto\captionsturkish{\renewcommand{\tablename}{Table.\@{} }}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='latex-babel', + confoverrides={'language': 'ja'}) +def test_babel_with_language_ja(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\documentclass[letterpaper,10pt,dvipdfmx]{sphinxmanual}' in result + assert '\\usepackage{babel}' not in result + assert '\\usepackage{tgtermes}' in result + assert '\\usepackage[Sonny]{fncychap}' not in result + assert '\\renewcommand{\\contentsname}{Table of content}\n' in result + assert '\\shorthandoff' not in result + + # sphinxmessages.sty + result = (app.outdir / 'sphinxmessages.sty').read_text(encoding='utf8') + print(result) + assert r'\def\pageautorefname{ページ}' in result + assert '\\@iden{\\renewcommand{\\figurename}{Fig.\\@{} }}' in result + assert '\\@iden{\\renewcommand{\\tablename}{Table.\\@{} }}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='latex-babel', + confoverrides={'language': 'unknown'}) +def test_babel_with_unknown_language(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\documentclass[letterpaper,10pt,english]{sphinxmanual}' in result + assert '\\usepackage{babel}' in result + assert '\\usepackage{tgtermes}' in result + assert '\\usepackage[Sonny]{fncychap}' in result + assert ('\\addto\\captionsenglish{\\renewcommand{\\contentsname}{Table of content}}\n' + in result) + assert '\\shorthandoff' in result + + assert "WARNING: no Babel option known for language 'unknown'" in warning.getvalue() + + # sphinxmessages.sty + result = (app.outdir / 'sphinxmessages.sty').read_text(encoding='utf8') + print(result) + assert r'\def\pageautorefname{page}' in result + assert r'\addto\captionsenglish{\renewcommand{\figurename}{Fig.\@{} }}' in result + assert r'\addto\captionsenglish{\renewcommand{\tablename}{Table.\@{} }}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='latex-babel', + confoverrides={'language': 'de', 'latex_engine': 'lualatex'}) +def test_polyglossia_with_language_de(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\documentclass[letterpaper,10pt,german]{sphinxmanual}' in 
result + assert '\\usepackage{polyglossia}' in result + assert '\\setmainlanguage[spelling=new]{german}' in result + assert '\\usepackage{tgtermes}' not in result + assert '\\usepackage[Sonny]{fncychap}' in result + assert ('\\addto\\captionsgerman{\\renewcommand{\\contentsname}{Table of content}}\n' + in result) + assert '\\shorthandoff' not in result + + # sphinxmessages.sty + result = (app.outdir / 'sphinxmessages.sty').read_text(encoding='utf8') + print(result) + assert r'\def\pageautorefname{Seite}' in result + assert r'\addto\captionsgerman{\renewcommand{\figurename}{Fig.\@{} }}' in result + assert r'\addto\captionsgerman{\renewcommand{\tablename}{Table.\@{} }}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='latex-babel', + confoverrides={'language': 'de-1901', 'latex_engine': 'lualatex'}) +def test_polyglossia_with_language_de_1901(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\documentclass[letterpaper,10pt,german]{sphinxmanual}' in result + assert '\\usepackage{polyglossia}' in result + assert '\\setmainlanguage[spelling=old]{german}' in result + assert '\\usepackage{tgtermes}' not in result + assert '\\usepackage[Sonny]{fncychap}' in result + assert ('\\addto\\captionsgerman{\\renewcommand{\\contentsname}{Table of content}}\n' + in result) + assert '\\shorthandoff' not in result + + # sphinxmessages.sty + result = (app.outdir / 'sphinxmessages.sty').read_text(encoding='utf8') + print(result) + assert r'\def\pageautorefname{page}' in result + assert r'\addto\captionsgerman{\renewcommand{\figurename}{Fig.\@{} }}' in result + assert r'\addto\captionsgerman{\renewcommand{\tablename}{Table.\@{} }}' in result + + +@pytest.mark.sphinx('latex') +def test_footnote(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'sphinxtests.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert ('\\sphinxAtStartPar\n%\n\\begin{footnote}[1]\\sphinxAtStartFootnote\n' + 'numbered\n%\n\\end{footnote}') in result + assert ('\\begin{footnote}[2]\\sphinxAtStartFootnote\nauto numbered\n%\n' + '\\end{footnote}') in result + assert '\\begin{footnote}[3]\\sphinxAtStartFootnote\nnamed\n%\n\\end{footnote}' in result + assert '\\sphinxcite{footnote:bar}' in result + assert ('\\bibitem[bar]{footnote:bar}\n\\sphinxAtStartPar\ncite\n') in result + assert '\\sphinxcaption{Table caption \\sphinxfootnotemark[4]' in result + assert ('\\sphinxmidrule\n\\sphinxtableatstartofbodyhook%\n' + '\\begin{footnotetext}[4]\\sphinxAtStartFootnote\n' + 'footnote in table caption\n%\n\\end{footnotetext}\\ignorespaces %\n' + '\\begin{footnotetext}[5]\\sphinxAtStartFootnote\n' + 'footnote in table header\n%\n\\end{footnotetext}\\ignorespaces ' + '\n\\sphinxAtStartPar\n' + 'VIDIOC\\_CROPCAP\n&\n\\sphinxAtStartPar\n') in result + assert ('Information about VIDIOC\\_CROPCAP %\n' + '\\begin{footnote}[6]\\sphinxAtStartFootnote\n' + 'footnote in table not in header\n%\n\\end{footnote}\n\\\\\n' + '\\sphinxbottomrule\n\\end{tabulary}\n' + '\\sphinxtableafterendhook\\par\n\\sphinxattableend\\end{savenotes}\n') in result + + +@pytest.mark.sphinx('latex', testroot='footnotes') +def test_reference_in_caption_and_codeblock_in_footnote(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + 
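# The language-related tests above exercise a single conf.py switch: with the
# default pdflatex engine, `language` maps to a babel option (e.g. ngerman,
# russian, turkish), while with lualatex Sphinx emits polyglossia's
# \setmainlanguage instead, and Japanese skips babel in favour of the
# ujbook/ujreport document classes, exactly as the assertions check.
# A minimal sketch of one asserted combination (values taken from the
# confoverrides above):
latex_engine = 'lualatex'   # with 'pdflatex', babel + ngerman would be used instead
language = 'de'             # 'de-1901' selects polyglossia's spelling=old variant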
print(warning.getvalue()) + assert ('\\caption{This is the figure caption with a reference to ' + '\\sphinxcite{index:authoryear}.}' in result) + assert '\\chapter{The section with a reference to {[}AuthorYear{]}}' in result + assert ('\\sphinxcaption{The table title with a reference' + ' to {[}AuthorYear{]}}' in result) + assert '\\subsubsection*{The rubric title with a reference to {[}AuthorYear{]}}' in result + assert ('\\chapter{The section with a reference to \\sphinxfootnotemark[6]}\n' + '\\label{\\detokenize{index:the-section-with-a-reference-to}}' + '%\n\\begin{footnotetext}[6]\\sphinxAtStartFootnote\n' + 'Footnote in section\n%\n\\end{footnotetext}') in result + assert ('\\caption{This is the figure caption with a footnote to ' + '\\sphinxfootnotemark[8].}\\label{\\detokenize{index:id35}}\\end{figure}\n' + '%\n\\begin{footnotetext}[8]\\sphinxAtStartFootnote\n' + 'Footnote in caption\n%\n\\end{footnotetext}') in result + assert ('\\sphinxcaption{footnote \\sphinxfootnotemark[9] in ' + 'caption of normal table}\\label{\\detokenize{index:id36}}') in result + assert ('\\caption{footnote \\sphinxfootnotemark[10] ' + 'in caption \\sphinxfootnotemark[11] of longtable\\strut}') in result + assert ('\\endlastfoot\n\\sphinxtableatstartofbodyhook\n%\n' + '\\begin{footnotetext}[10]\\sphinxAtStartFootnote\n' + 'Foot note in longtable\n%\n\\end{footnotetext}\\ignorespaces %\n' + '\\begin{footnotetext}[11]\\sphinxAtStartFootnote\n' + 'Second footnote in caption of longtable\n') in result + assert ('This is a reference to the code\\sphinxhyphen{}block in the footnote:\n' + '{\\hyperref[\\detokenize{index:codeblockinfootnote}]' + '{\\sphinxcrossref{\\DUrole{std,std-ref}{I am in a footnote}}}}') in result + assert ('&\n\\sphinxAtStartPar\nThis is one more footnote with some code in it %\n' + '\\begin{footnote}[12]\\sphinxAtStartFootnote\n' + 'Third footnote in longtable\n') in result + assert ('\\end{sphinxVerbatim}\n%\n\\end{footnote}.\n') in result + assert '\\begin{sphinxVerbatim}[commandchars=\\\\\\{\\}]' in result + + +@pytest.mark.sphinx('latex', testroot='footnotes') +def test_footnote_referred_multiple_times(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + + assert ('Explicitly numbered footnote: %\n' + '\\begin{footnote}[100]' + '\\sphinxAtStartFootnote\nNumbered footnote\n%\n' + '\\end{footnote} \\sphinxfootnotemark[100]\n' + in result) + assert ('Named footnote: %\n' + '\\begin{footnote}[13]' + '\\sphinxAtStartFootnote\nNamed footnote\n%\n' + '\\end{footnote} \\sphinxfootnotemark[13]\n' + in result) + + +@pytest.mark.sphinx( + 'latex', testroot='footnotes', + confoverrides={'latex_show_urls': 'inline'}) +def test_latex_show_urls_is_inline(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert ('Same footnote number %\n' + '\\begin{footnote}[1]\\sphinxAtStartFootnote\n' + 'footnote in bar\n%\n\\end{footnote} in bar.rst') in result + assert ('Auto footnote number %\n\\begin{footnote}[1]\\sphinxAtStartFootnote\n' + 'footnote in baz\n%\n\\end{footnote} in baz.rst') in result + assert ('\\phantomsection\\label{\\detokenize{index:id38}}' + '{\\hyperref[\\detokenize{index:the-section' + '-with-a-reference-to-authoryear}]' + '{\\sphinxcrossref{The section with a reference to ' + '\\sphinxcite{index:authoryear}}}}') in 
result + assert ('\\phantomsection\\label{\\detokenize{index:id39}}' + '{\\hyperref[\\detokenize{index:the-section-with-a-reference-to}]' + '{\\sphinxcrossref{The section with a reference to }}}' in result) + assert ('First footnote: %\n\\begin{footnote}[2]\\sphinxAtStartFootnote\n' + 'First\n%\n\\end{footnote}') in result + assert ('Second footnote: %\n' + '\\begin{footnote}[1]\\sphinxAtStartFootnote\n' + 'Second\n%\n\\end{footnote}\n') in result + assert '\\sphinxhref{http://sphinx-doc.org/}{Sphinx} (http://sphinx\\sphinxhyphen{}doc.org/)' in result + assert ('Third footnote: %\n\\begin{footnote}[3]\\sphinxAtStartFootnote\n' + 'Third \\sphinxfootnotemark[4]\n%\n\\end{footnote}%\n' + '\\begin{footnotetext}[4]\\sphinxAtStartFootnote\n' + 'Footnote inside footnote\n%\n\\end{footnotetext}\\ignorespaces') in result + assert ('Fourth footnote: %\n\\begin{footnote}[5]\\sphinxAtStartFootnote\n' + 'Fourth\n%\n\\end{footnote}\n') in result + assert ('\\sphinxhref{http://sphinx-doc.org/~test/}{URL including tilde} ' + '(http://sphinx\\sphinxhyphen{}doc.org/\\textasciitilde{}test/)') in result + assert ('\\sphinxlineitem{\\sphinxhref{http://sphinx-doc.org/}{URL in term} ' + '(http://sphinx\\sphinxhyphen{}doc.org/)}\n' + '\\sphinxAtStartPar\nDescription' in result) + assert ('\\sphinxlineitem{Footnote in term \\sphinxfootnotemark[7]}%\n' + '\\begin{footnotetext}[7]\\sphinxAtStartFootnote\n' in result) + assert ('\\sphinxlineitem{\\sphinxhref{http://sphinx-doc.org/}{URL in term} ' + '(http://sphinx\\sphinxhyphen{}doc.org/)}\n' + '\\sphinxAtStartPar\nDescription' in result) + assert ('\\sphinxlineitem{Footnote in term \\sphinxfootnotemark[7]}%\n' + '\\begin{footnotetext}[7]\\sphinxAtStartFootnote\n' + 'Footnote in term\n%\n\\end{footnotetext}\\ignorespaces ' + '\n\\sphinxAtStartPar\nDescription') in result + assert ('\\sphinxlineitem{\\sphinxhref{http://sphinx-doc.org/}{Term in deflist} ' + '(http://sphinx\\sphinxhyphen{}doc.org/)}' + '\n\\sphinxAtStartPar\nDescription') in result + assert '\\sphinxurl{https://github.com/sphinx-doc/sphinx}\n' in result + assert ('\\sphinxhref{mailto:sphinx-dev@googlegroups.com}' + '{sphinx\\sphinxhyphen{}dev@googlegroups.com}') in result + assert '\\begin{savenotes}\\begin{fulllineitems}' not in result + + +@pytest.mark.sphinx( + 'latex', testroot='footnotes', + confoverrides={'latex_show_urls': 'footnote'}) +def test_latex_show_urls_is_footnote(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert ('Same footnote number %\n' + '\\begin{footnote}[1]\\sphinxAtStartFootnote\n' + 'footnote in bar\n%\n\\end{footnote} in bar.rst') in result + assert ('Auto footnote number %\n\\begin{footnote}[2]\\sphinxAtStartFootnote\n' + 'footnote in baz\n%\n\\end{footnote} in baz.rst') in result + assert ('\\phantomsection\\label{\\detokenize{index:id38}}' + '{\\hyperref[\\detokenize{index:the-section-with-a-reference-to-authoryear}]' + '{\\sphinxcrossref{The section with a reference ' + 'to \\sphinxcite{index:authoryear}}}}') in result + assert ('\\phantomsection\\label{\\detokenize{index:id39}}' + '{\\hyperref[\\detokenize{index:the-section-with-a-reference-to}]' + '{\\sphinxcrossref{The section with a reference to }}}') in result + assert ('First footnote: %\n\\begin{footnote}[3]\\sphinxAtStartFootnote\n' + 'First\n%\n\\end{footnote}') in result + assert ('Second footnote: %\n' + '\\begin{footnote}[1]\\sphinxAtStartFootnote\n' + 
'Second\n%\n\\end{footnote}') in result + assert ('\\sphinxhref{http://sphinx-doc.org/}{Sphinx}' + '%\n\\begin{footnote}[4]\\sphinxAtStartFootnote\n' + '\\sphinxnolinkurl{http://sphinx-doc.org/}\n%\n\\end{footnote}') in result + assert ('Third footnote: %\n\\begin{footnote}[6]\\sphinxAtStartFootnote\n' + 'Third \\sphinxfootnotemark[7]\n%\n\\end{footnote}%\n' + '\\begin{footnotetext}[7]\\sphinxAtStartFootnote\n' + 'Footnote inside footnote\n%\n' + '\\end{footnotetext}\\ignorespaces') in result + assert ('Fourth footnote: %\n\\begin{footnote}[8]\\sphinxAtStartFootnote\n' + 'Fourth\n%\n\\end{footnote}\n') in result + assert ('\\sphinxhref{http://sphinx-doc.org/~test/}{URL including tilde}' + '%\n\\begin{footnote}[5]\\sphinxAtStartFootnote\n' + '\\sphinxnolinkurl{http://sphinx-doc.org/~test/}\n%\n\\end{footnote}') in result + assert ('\\sphinxlineitem{\\sphinxhref{http://sphinx-doc.org/}' + '{URL in term}\\sphinxfootnotemark[10]}%\n' + '\\begin{footnotetext}[10]' + '\\sphinxAtStartFootnote\n' + '\\sphinxnolinkurl{http://sphinx-doc.org/}\n%\n' + '\\end{footnotetext}\\ignorespaces \n\\sphinxAtStartPar\nDescription') in result + assert ('\\sphinxlineitem{Footnote in term \\sphinxfootnotemark[12]}%\n' + '\\begin{footnotetext}[12]' + '\\sphinxAtStartFootnote\n' + 'Footnote in term\n%\n\\end{footnotetext}\\ignorespaces ' + '\n\\sphinxAtStartPar\nDescription') in result + assert ('\\sphinxlineitem{\\sphinxhref{http://sphinx-doc.org/}{Term in deflist}' + '\\sphinxfootnotemark[11]}%\n' + '\\begin{footnotetext}[11]' + '\\sphinxAtStartFootnote\n' + '\\sphinxnolinkurl{http://sphinx-doc.org/}\n%\n' + '\\end{footnotetext}\\ignorespaces \n\\sphinxAtStartPar\nDescription') in result + assert ('\\sphinxurl{https://github.com/sphinx-doc/sphinx}\n' in result) + assert ('\\sphinxhref{mailto:sphinx-dev@googlegroups.com}' + '{sphinx\\sphinxhyphen{}dev@googlegroups.com}\n') in result + assert '\\begin{savenotes}\\begin{fulllineitems}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='footnotes', + confoverrides={'latex_show_urls': 'no'}) +def test_latex_show_urls_is_no(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert ('Same footnote number %\n' + '\\begin{footnote}[1]\\sphinxAtStartFootnote\n' + 'footnote in bar\n%\n\\end{footnote} in bar.rst') in result + assert ('Auto footnote number %\n\\begin{footnote}[1]\\sphinxAtStartFootnote\n' + 'footnote in baz\n%\n\\end{footnote} in baz.rst') in result + assert ('\\phantomsection\\label{\\detokenize{index:id38}}' + '{\\hyperref[\\detokenize{index:the-section-with-a-reference-to-authoryear}]' + '{\\sphinxcrossref{The section with a reference ' + 'to \\sphinxcite{index:authoryear}}}}') in result + assert ('\\phantomsection\\label{\\detokenize{index:id39}}' + '{\\hyperref[\\detokenize{index:the-section-with-a-reference-to}]' + '{\\sphinxcrossref{The section with a reference to }}}' in result) + assert ('First footnote: %\n\\begin{footnote}[2]\\sphinxAtStartFootnote\n' + 'First\n%\n\\end{footnote}') in result + assert ('Second footnote: %\n' + '\\begin{footnote}[1]\\sphinxAtStartFootnote\n' + 'Second\n%\n\\end{footnote}') in result + assert '\\sphinxhref{http://sphinx-doc.org/}{Sphinx}' in result + assert ('Third footnote: %\n\\begin{footnote}[3]\\sphinxAtStartFootnote\n' + 'Third \\sphinxfootnotemark[4]\n%\n\\end{footnote}%\n' + '\\begin{footnotetext}[4]\\sphinxAtStartFootnote\n' + 'Footnote inside 
footnote\n%\n\\end{footnotetext}\\ignorespaces') in result + assert ('Fourth footnote: %\n\\begin{footnote}[5]\\sphinxAtStartFootnote\n' + 'Fourth\n%\n\\end{footnote}\n') in result + assert '\\sphinxhref{http://sphinx-doc.org/~test/}{URL including tilde}' in result + assert ('\\sphinxlineitem{\\sphinxhref{http://sphinx-doc.org/}{URL in term}}\n' + '\\sphinxAtStartPar\nDescription') in result + assert ('\\sphinxlineitem{Footnote in term \\sphinxfootnotemark[7]}%\n' + '\\begin{footnotetext}[7]\\sphinxAtStartFootnote\n' + 'Footnote in term\n%\n\\end{footnotetext}\\ignorespaces ' + '\n\\sphinxAtStartPar\nDescription') in result + assert ('\\sphinxlineitem{\\sphinxhref{http://sphinx-doc.org/}{Term in deflist}}' + '\n\\sphinxAtStartPar\nDescription') in result + assert ('\\sphinxurl{https://github.com/sphinx-doc/sphinx}\n' in result) + assert ('\\sphinxhref{mailto:sphinx-dev@googlegroups.com}' + '{sphinx\\sphinxhyphen{}dev@googlegroups.com}\n') in result + assert '\\begin{savenotes}\\begin{fulllineitems}' not in result + + +@pytest.mark.sphinx( + 'latex', testroot='footnotes', + confoverrides={'latex_show_urls': 'footnote', + 'rst_prolog': '.. |URL| replace:: `text <http://www.example.com/>`__'}) +def test_latex_show_urls_footnote_and_substitutions(app, status, warning): + # hyperlinks in substitutions should not effect to make footnotes (refs: #4784) + test_latex_show_urls_is_footnote(app, status, warning) + + +@pytest.mark.sphinx('latex', testroot='image-in-section') +def test_image_in_section(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert ('\\chapter[Test section]{\\lowercase{\\sphinxincludegraphics' + '[width=15bp,height=15bp]}{{pic}.png} Test section}' + in result) + assert ('\\chapter[Other {[}blah{]} section]{Other {[}blah{]} ' + '\\lowercase{\\sphinxincludegraphics[width=15bp,height=15bp]}' + '{{pic}.png} section}' in result) + assert ('\\chapter{Another section}' in result) + + +@pytest.mark.sphinx('latex', testroot='basic', + confoverrides={'latex_logo': 'notfound.jpg'}) +def test_latex_logo_if_not_found(app, status, warning): + with pytest.raises(SphinxError): + app.builder.build_all() + + +@pytest.mark.sphinx('latex', testroot='toctree-maxdepth') +def test_toctree_maxdepth_manual(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\setcounter{tocdepth}{1}' in result + assert '\\setcounter{secnumdepth}' not in result + assert '\\chapter{Foo}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='toctree-maxdepth', + confoverrides={'latex_documents': [ + ('index', 'python.tex', 'Sphinx Tests Documentation', + 'Georg Brandl', 'howto'), + ]}) +def test_toctree_maxdepth_howto(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\setcounter{tocdepth}{2}' in result + assert '\\setcounter{secnumdepth}' not in result + assert '\\section{Foo}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='toctree-maxdepth', + confoverrides={'root_doc': 'foo'}) +def test_toctree_not_found(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + 
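# The three latex_show_urls variants tested above render the same rst
# hyperlink in three ways, matching the assertions: 'inline' appends the URL
# in parentheses after \sphinxhref{...}{...}, 'footnote' adds a footnote
# containing \sphinxnolinkurl{...}, and 'no' keeps only the linked text.
# A minimal conf.py sketch for the footnote variant used in the overrides above:
latex_show_urls = 'footnote'   # other tested values: 'inline', 'no'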
print(warning.getvalue()) + assert '\\setcounter{tocdepth}' not in result + assert '\\setcounter{secnumdepth}' not in result + assert '\\chapter{Foo A}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='toctree-maxdepth', + confoverrides={'root_doc': 'bar'}) +def test_toctree_without_maxdepth(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\setcounter{tocdepth}' not in result + assert '\\setcounter{secnumdepth}' not in result + + +@pytest.mark.sphinx( + 'latex', testroot='toctree-maxdepth', + confoverrides={'root_doc': 'qux'}) +def test_toctree_with_deeper_maxdepth(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\setcounter{tocdepth}{3}' in result + assert '\\setcounter{secnumdepth}{3}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='toctree-maxdepth', + confoverrides={'latex_toplevel_sectioning': None}) +def test_latex_toplevel_sectioning_is_None(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\chapter{Foo}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='toctree-maxdepth', + confoverrides={'latex_toplevel_sectioning': 'part'}) +def test_latex_toplevel_sectioning_is_part(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\part{Foo}' in result + assert '\\chapter{Foo A}' in result + assert '\\chapter{Foo B}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='toctree-maxdepth', + confoverrides={'latex_toplevel_sectioning': 'part', + 'latex_documents': [ + ('index', 'python.tex', 'Sphinx Tests Documentation', + 'Georg Brandl', 'howto'), + ]}) +def test_latex_toplevel_sectioning_is_part_with_howto(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\part{Foo}' in result + assert '\\section{Foo A}' in result + assert '\\section{Foo B}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='toctree-maxdepth', + confoverrides={'latex_toplevel_sectioning': 'chapter'}) +def test_latex_toplevel_sectioning_is_chapter(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\chapter{Foo}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='toctree-maxdepth', + confoverrides={'latex_toplevel_sectioning': 'chapter', + 'latex_documents': [ + ('index', 'python.tex', 'Sphinx Tests Documentation', + 'Georg Brandl', 'howto'), + ]}) +def test_latex_toplevel_sectioning_is_chapter_with_howto(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\section{Foo}' in result + + +@pytest.mark.sphinx( + 'latex', testroot='toctree-maxdepth', + confoverrides={'latex_toplevel_sectioning': 'section'}) +def test_latex_toplevel_sectioning_is_section(app, status, warning): + 
app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + assert '\\section{Foo}' in result + + +@skip_if_stylefiles_notfound +@pytest.mark.sphinx('latex', testroot='maxlistdepth') +def test_maxlistdepth_at_ten(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + print(status.getvalue()) + print(warning.getvalue()) + compile_latex_document(app, 'python.tex') + + +@pytest.mark.sphinx('latex', testroot='latex-table', + confoverrides={'latex_table_style': []}) +@pytest.mark.test_params(shared_result='latex-table') +def test_latex_table_tabulars(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + tables = {} + for chap in re.split(r'\\(?:section|chapter){', result)[1:]: + sectname, content = chap.split('}', 1) + content = re.sub(r'\\sphinxstepscope', '', content) # filter a separator + tables[sectname] = content.strip() + + def get_expected(name): + return (app.srcdir / 'expects' / (name + '.tex')).read_text(encoding='utf8').strip() + + # simple_table + actual = tables['simple table'] + expected = get_expected('simple_table') + assert actual == expected + + # table having :widths: option + actual = tables['table having :widths: option'] + expected = get_expected('table_having_widths') + assert actual == expected + + # table having :align: option (tabulary) + actual = tables['table having :align: option (tabulary)'] + expected = get_expected('tabulary_having_widths') + assert actual == expected + + # table having :align: option (tabular) + actual = tables['table having :align: option (tabular)'] + expected = get_expected('tabular_having_widths') + assert actual == expected + + # table with tabularcolumn + actual = tables['table with tabularcolumn'] + expected = get_expected('tabularcolumn') + assert actual == expected + + # table with cell in first column having three paragraphs + actual = tables['table with cell in first column having three paragraphs'] + expected = get_expected('table_having_threeparagraphs_cell_in_first_col') + assert actual == expected + + # table having caption + actual = tables['table having caption'] + expected = get_expected('table_having_caption') + assert actual == expected + + # table having verbatim + actual = tables['table having verbatim'] + expected = get_expected('table_having_verbatim') + assert actual == expected + + # table having problematic cell + actual = tables['table having problematic cell'] + expected = get_expected('table_having_problematic_cell') + assert actual == expected + + # table having both :widths: and problematic cell + actual = tables['table having both :widths: and problematic cell'] + expected = get_expected('table_having_widths_and_problematic_cell') + assert actual == expected + + # table having both stub columns and problematic cell + actual = tables['table having both stub columns and problematic cell'] + expected = get_expected('table_having_stub_columns_and_problematic_cell') + assert actual == expected + + +@pytest.mark.sphinx('latex', testroot='latex-table', + confoverrides={'latex_table_style': []}) +@pytest.mark.test_params(shared_result='latex-table') +def test_latex_table_longtable(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + tables = {} + for chap in re.split(r'\\(?:section|chapter){', result)[1:]: + sectname, 
content = chap.split('}', 1) + content = re.sub(r'\\sphinxstepscope', '', content) # filter a separator + tables[sectname] = content.strip() + + def get_expected(name): + return (app.srcdir / 'expects' / (name + '.tex')).read_text(encoding='utf8').strip() + + # longtable + actual = tables['longtable'] + expected = get_expected('longtable') + assert actual == expected + + # longtable having :widths: option + actual = tables['longtable having :widths: option'] + expected = get_expected('longtable_having_widths') + assert actual == expected + + # longtable having :align: option + actual = tables['longtable having :align: option'] + expected = get_expected('longtable_having_align') + assert actual == expected + + # longtable with tabularcolumn + actual = tables['longtable with tabularcolumn'] + expected = get_expected('longtable_with_tabularcolumn') + assert actual == expected + + # longtable having caption + actual = tables['longtable having caption'] + expected = get_expected('longtable_having_caption') + assert actual == expected + + # longtable having verbatim + actual = tables['longtable having verbatim'] + expected = get_expected('longtable_having_verbatim') + assert actual == expected + + # longtable having problematic cell + actual = tables['longtable having problematic cell'] + expected = get_expected('longtable_having_problematic_cell') + assert actual == expected + + # longtable having both :widths: and problematic cell + actual = tables['longtable having both :widths: and problematic cell'] + expected = get_expected('longtable_having_widths_and_problematic_cell') + assert actual == expected + + # longtable having both stub columns and problematic cell + actual = tables['longtable having both stub columns and problematic cell'] + expected = get_expected('longtable_having_stub_columns_and_problematic_cell') + assert actual == expected + + +@pytest.mark.sphinx('latex', testroot='latex-table', + confoverrides={'latex_table_style': []}) +@pytest.mark.test_params(shared_result='latex-table') +def test_latex_table_complex_tables(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + tables = {} + for chap in re.split(r'\\(?:section|renewcommand){', result)[1:]: + sectname, content = chap.split('}', 1) + tables[sectname] = content.strip() + + def get_expected(name): + return (app.srcdir / 'expects' / (name + '.tex')).read_text(encoding='utf8').strip() + + # grid table + actual = tables['grid table'] + expected = get_expected('gridtable') + assert actual == expected + + # grid table with tabularcolumns + # MEMO: filename should end with tabularcolumns but tabularcolumn has been + # used in existing other cases + actual = tables['grid table with tabularcolumns having no vline'] + expected = get_expected('gridtable_with_tabularcolumn') + assert actual == expected + + # complex spanning cell + actual = tables['complex spanning cell'] + expected = get_expected('complex_spanning_cell') + assert actual == expected + + +@pytest.mark.sphinx('latex', testroot='latex-table') +def test_latex_table_with_booktabs_and_colorrows(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + assert r'\PassOptionsToPackage{booktabs}{sphinx}' in result + assert r'\PassOptionsToPackage{colorrows}{sphinx}' in result + # tabularcolumns + assert r'\begin{longtable}{|c|c|}' in result + # class: standard + assert r'\begin{tabulary}{\linewidth}[t]{|T|T|T|T|T|}' in result + assert 
r'\begin{longtable}{ll}' in result + assert r'\begin{tabular}[t]{*{2}{\X{1}{2}}}' in result + assert r'\begin{tabular}[t]{\X{30}{100}\X{70}{100}}' in result + + +@pytest.mark.sphinx('latex', testroot='latex-table', + confoverrides={'templates_path': ['_mytemplates/latex']}) +def test_latex_table_custom_template_caseA(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + assert 'SALUT LES COPAINS' in result + + +@pytest.mark.sphinx('latex', testroot='latex-table', + confoverrides={'templates_path': ['_mytemplates']}) +def test_latex_table_custom_template_caseB(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + assert 'SALUT LES COPAINS' not in result + + +@pytest.mark.sphinx('latex', testroot='latex-table') +@pytest.mark.test_params(shared_result='latex-table') +def test_latex_table_custom_template_caseC(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + assert 'SALUT LES COPAINS' not in result + + +@pytest.mark.sphinx('latex', testroot='directives-raw') +def test_latex_raw_directive(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + + # standard case + assert 'standalone raw directive (HTML)' not in result + assert ('\\label{\\detokenize{index:id1}}\n' + 'standalone raw directive (LaTeX)' in result) + + # with substitution + assert 'HTML: abc ghi' in result + assert 'LaTeX: abc def ghi' in result + + +@pytest.mark.sphinx('latex', testroot='images') +def test_latex_images(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + + # images are copied + assert '\\sphinxincludegraphics{{python-logo}.png}' in result + assert (app.outdir / 'python-logo.png').exists() + + # not found images + assert '\\sphinxincludegraphics{{NOT_EXIST}.PNG}' not in result + assert ('WARNING: Could not fetch remote image: ' + 'https://www.google.com/NOT_EXIST.PNG [404]' in warning.getvalue()) + + # an image having target + assert ('\\sphinxhref{https://www.sphinx-doc.org/}' + '{\\sphinxincludegraphics{{rimg}.png}}\n\n' in result) + + # a centerized image having target + assert ('\\sphinxhref{https://www.python.org/}{{\\hspace*{\\fill}' + '\\sphinxincludegraphics{{rimg}.png}\\hspace*{\\fill}}}\n\n' in result) + + +@pytest.mark.sphinx('latex', testroot='latex-index') +def test_latex_index(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + assert ('A \\index{famous@\\spxentry{famous}}famous ' + '\\index{equation@\\spxentry{equation}}equation:\n' in result) + assert ('\n\\index{Einstein@\\spxentry{Einstein}}' + '\\index{relativity@\\spxentry{relativity}}' + '\\ignorespaces \n\\sphinxAtStartPar\nand') in result + assert ('\n\\index{main \\sphinxleftcurlybrace{}@\\spxentry{' + 'main \\sphinxleftcurlybrace{}}}\\ignorespaces ' in result) + + +@pytest.mark.sphinx('latex', testroot='latex-equations') +def test_latex_equations(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + expected = (app.srcdir / 'expects' / 'latex-equations.tex').read_text(encoding='utf8').strip() + + assert expected in result + + +@pytest.mark.sphinx('latex', testroot='image-in-parsed-literal') +def test_latex_image_in_parsed_literal(app, status, warning): + app.builder.build_all() + + result = 
(app.outdir / 'python.tex').read_text(encoding='utf8') + assert ('{\\sphinxunactivateextrasandspace \\raisebox{-0.5\\height}' + '{\\sphinxincludegraphics[height=2.00000cm]{{pic}.png}}' + '}AFTER') in result + + +@pytest.mark.sphinx('latex', testroot='nested-enumerated-list') +def test_latex_nested_enumerated_list(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + assert ('\\sphinxsetlistlabels{\\arabic}{enumi}{enumii}{}{.}%\n' + '\\setcounter{enumi}{4}\n' in result) + assert ('\\sphinxsetlistlabels{\\alph}{enumii}{enumiii}{}{.}%\n' + '\\setcounter{enumii}{3}\n' in result) + assert ('\\sphinxsetlistlabels{\\arabic}{enumiii}{enumiv}{}{)}%\n' + '\\setcounter{enumiii}{9}\n' in result) + assert ('\\sphinxsetlistlabels{\\arabic}{enumiv}{enumv}{(}{)}%\n' + '\\setcounter{enumiv}{23}\n' in result) + assert ('\\sphinxsetlistlabels{\\roman}{enumii}{enumiii}{}{.}%\n' + '\\setcounter{enumii}{2}\n' in result) + + +@pytest.mark.sphinx('latex', testroot='footnotes') +def test_latex_thebibliography(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + assert ('\\begin{sphinxthebibliography}{AuthorYe}\n' + '\\bibitem[AuthorYear]{index:authoryear}\n\\sphinxAtStartPar\n' + 'Author, Title, Year\n' + '\\end{sphinxthebibliography}\n' in result) + assert '\\sphinxcite{index:authoryear}' in result + + +@pytest.mark.sphinx('latex', testroot='glossary') +def test_latex_glossary(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + assert (r'\sphinxlineitem{ähnlich\index{ähnlich@\spxentry{ähnlich}|spxpagem}' + r'\phantomsection' + r'\label{\detokenize{index:term-ahnlich}}}' in result) + assert (r'\sphinxlineitem{boson\index{boson@\spxentry{boson}|spxpagem}\phantomsection' + r'\label{\detokenize{index:term-boson}}}' in result) + assert (r'\sphinxlineitem{\sphinxstyleemphasis{fermion}' + r'\index{fermion@\spxentry{fermion}|spxpagem}' + r'\phantomsection' + r'\label{\detokenize{index:term-fermion}}}' in result) + assert (r'\sphinxlineitem{tauon\index{tauon@\spxentry{tauon}|spxpagem}\phantomsection' + r'\label{\detokenize{index:term-tauon}}}' + r'\sphinxlineitem{myon\index{myon@\spxentry{myon}|spxpagem}\phantomsection' + r'\label{\detokenize{index:term-myon}}}' + r'\sphinxlineitem{electron\index{electron@\spxentry{electron}|spxpagem}\phantomsection' + r'\label{\detokenize{index:term-electron}}}' in result) + assert (r'\sphinxlineitem{über\index{über@\spxentry{über}|spxpagem}\phantomsection' + r'\label{\detokenize{index:term-uber}}}' in result) + + +@pytest.mark.sphinx('latex', testroot='latex-labels') +def test_latex_labels(app, status, warning): + app.builder.build_all() + + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + + # figures + assert (r'\caption{labeled figure}' + r'\label{\detokenize{index:id1}}' + r'\label{\detokenize{index:figure2}}' + r'\label{\detokenize{index:figure1}}' + r'\end{figure}' in result) + assert (r'\caption{labeled figure}' + '\\label{\\detokenize{index:figure3}}\n' + '\\begin{sphinxlegend}\n\\sphinxAtStartPar\n' + 'with a legend\n\\end{sphinxlegend}\n' + r'\end{figure}' in result) + + # code-blocks + assert (r'\def\sphinxLiteralBlockLabel{' + r'\label{\detokenize{index:codeblock2}}' + r'\label{\detokenize{index:codeblock1}}}' in result) + assert (r'\def\sphinxLiteralBlockLabel{' + r'\label{\detokenize{index:codeblock3}}}' in result) + + # tables + assert 
(r'\sphinxcaption{table caption}' + r'\label{\detokenize{index:id2}}' + r'\label{\detokenize{index:table2}}' + r'\label{\detokenize{index:table1}}' in result) + assert (r'\sphinxcaption{table caption}' + r'\label{\detokenize{index:table3}}' in result) + + # sections + assert ('\\chapter{subsection}\n' + r'\label{\detokenize{index:subsection}}' + r'\label{\detokenize{index:section2}}' + r'\label{\detokenize{index:section1}}' in result) + assert ('\\section{subsubsection}\n' + r'\label{\detokenize{index:subsubsection}}' + r'\label{\detokenize{index:section3}}' in result) + assert ('\\subsection{otherdoc}\n' + r'\label{\detokenize{otherdoc:otherdoc}}' + r'\label{\detokenize{otherdoc::doc}}' in result) + + # Embedded standalone hyperlink reference (refs: #5948) + assert result.count(r'\label{\detokenize{index:section1}}') == 1 + + +@pytest.mark.sphinx('latex', testroot='latex-figure-in-admonition') +def test_latex_figure_in_admonition(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + assert r'\begin{figure}[H]' in result + + +def test_default_latex_documents(): + from sphinx.util import texescape + texescape.init() + config = Config({'root_doc': 'index', + 'project': 'STASI™ Documentation', + 'author': "Wolfgang Schäuble & G'Beckstein."}) + config.init_values() + config.add('latex_engine', None, True, None) + config.add('latex_theme', 'manual', True, None) + expected = [('index', 'stasi.tex', 'STASI™ Documentation', + r"Wolfgang Schäuble \& G\textquotesingle{}Beckstein.\@{}", 'manual')] + assert default_latex_documents(config) == expected + + +@skip_if_requested +@skip_if_stylefiles_notfound +@pytest.mark.sphinx('latex', testroot='latex-includegraphics') +def test_includegraphics_oversized(app, status, warning): + app.builder.build_all() + print(status.getvalue()) + print(warning.getvalue()) + compile_latex_document(app) + + +@pytest.mark.sphinx('latex', testroot='index_on_title') +def test_index_on_title(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + assert ('\\chapter{Test for index in top level title}\n' + '\\label{\\detokenize{contents:test-for-index-in-top-level-title}}' + '\\index{index@\\spxentry{index}}\n' + in result) + + +@pytest.mark.sphinx('latex', testroot='latex-unicode', + confoverrides={'latex_engine': 'pdflatex'}) +def test_texescape_for_non_unicode_supported_engine(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + assert 'script small e: e' in result + assert 'double struck italic small i: i' in result + assert r'superscript: \(\sp{\text{0}}\), \(\sp{\text{1}}\)' in result + assert r'subscript: \(\sb{\text{0}}\), \(\sb{\text{1}}\)' in result + + +@pytest.mark.sphinx('latex', testroot='latex-unicode', + confoverrides={'latex_engine': 'xelatex'}) +def test_texescape_for_unicode_supported_engine(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(result) + assert 'script small e: e' in result + assert 'double struck italic small i: i' in result + assert 'superscript: ⁰, ¹' in result + assert 'subscript: ₀, ₁' in result + + +@pytest.mark.sphinx('latex', testroot='basic', + confoverrides={'latex_elements': {'extrapackages': r'\usepackage{foo}'}}) +def test_latex_elements_extrapackages(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'test.tex').read_text(encoding='utf8') 
+ assert r'\usepackage{foo}' in result + + +@pytest.mark.sphinx('latex', testroot='nested-tables') +def test_latex_nested_tables(app, status, warning): + app.builder.build_all() + assert warning.getvalue() == '' + + +@pytest.mark.sphinx('latex', testroot='latex-container') +def test_latex_container(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + assert r'\begin{sphinxuseclass}{classname}' in result + assert r'\end{sphinxuseclass}' in result + + +@pytest.mark.sphinx('latex', testroot='reST-code-role') +def test_latex_code_role(app): + app.build() + content = (app.outdir / 'python.tex').read_text(encoding='utf8') + + common_content = ( + r'\PYG{k}{def} ' + r'\PYG{n+nf}{foo}' + r'\PYG{p}{(}' + r'\PYG{l+m+mi}{1} ' + r'\PYG{o}{+} ' + r'\PYG{l+m+mi}{2} ' + r'\PYG{o}{+} ' + r'\PYG{k+kc}{None} ' + r'\PYG{o}{+} ' + r'\PYG{l+s+s2}{\PYGZdq{}}' + r'\PYG{l+s+s2}{abc}' + r'\PYG{l+s+s2}{\PYGZdq{}}' + r'\PYG{p}{)}' + r'\PYG{p}{:} ' + r'\PYG{k}{pass}') + assert (r'Inline \sphinxcode{\sphinxupquote{%' + '\n' + + common_content + '%\n}} code block') in content + assert (r'\begin{sphinxVerbatim}[commandchars=\\\{\}]' + + '\n' + common_content + '\n' + r'\end{sphinxVerbatim}') in content + + +@pytest.mark.sphinx('latex', testroot='images') +def test_copy_images(app, status, warning): + app.build() + + test_dir = Path(app.outdir) + images = { + image.name for image in test_dir.rglob('*') + if image.suffix in {'.gif', '.pdf', '.png', '.svg'} + } + images.discard('python-logo.png') + assert images == { + 'img.pdf', + 'rimg.png', + 'testimäge.png', + } + + +@pytest.mark.sphinx('latex', testroot='latex-labels-before-module') +def test_duplicated_labels_before_module(app, status, warning): + app.build() + content: str = (app.outdir / 'python.tex').read_text(encoding='utf8') + + def count_label(name): + text = r'\phantomsection\label{\detokenize{%s}}' % name + return content.count(text) + + pattern = r'\\phantomsection\\label\{\\detokenize\{index:label-(?:auto-)?\d+[a-z]*}}' + # labels found in the TeX output + output_labels = frozenset(match.group() for match in re.finditer(pattern, content)) + # labels that have been tested and occurring exactly once in the output + tested_labels = set() + + # iterate over the (explicit) labels in the corresponding index.rst + for rst_label_name in [ + 'label_1a', 'label_1b', 'label_2', 'label_3', + 'label_auto_1a', 'label_auto_1b', 'label_auto_2', 'label_auto_3', + ]: + tex_label_name = 'index:' + rst_label_name.replace('_', '-') + tex_label_code = r'\phantomsection\label{\detokenize{%s}}' % tex_label_name + assert content.count(tex_label_code) == 1, f'duplicated label: {tex_label_name!r}' + tested_labels.add(tex_label_code) + + # ensure that we did not forget any label to check + # and if so, report them nicely in case of failure + assert sorted(tested_labels) == sorted(output_labels) + + +@pytest.mark.sphinx('latex', testroot='domain-py-python_maximum_signature_line_length', + confoverrides={'python_maximum_signature_line_length': 23}) +def test_one_parameter_per_line(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + + # TODO: should these asserts check presence or absence of a final \sphinxparamcomma? 
+ # signature of 23 characters is too short to trigger one-param-per-line mark-up + assert ('\\pysiglinewithargsret{\\sphinxbfcode{\\sphinxupquote{hello}}}' in result) + + assert ('\\pysigwithonelineperarg{\\sphinxbfcode{\\sphinxupquote{foo}}}' in result) diff --git a/tests/test_build_linkcheck.py b/tests/test_build_linkcheck.py new file mode 100644 index 0000000..38a0bd1 --- /dev/null +++ b/tests/test_build_linkcheck.py @@ -0,0 +1,989 @@ +"""Test the build process with manpage builder with the test root.""" + +from __future__ import annotations + +import http.server +import json +import re +import sys +import textwrap +import time +import wsgiref.handlers +from base64 import b64encode +from os import path +from queue import Queue +from unittest import mock + +import pytest +from urllib3.poolmanager import PoolManager + +import sphinx.util.http_date +from sphinx.builders.linkcheck import ( + CheckRequest, + Hyperlink, + HyperlinkAvailabilityCheckWorker, + RateLimit, +) +from sphinx.testing.util import strip_escseq +from sphinx.util import requests +from sphinx.util.console import strip_colors + +from .utils import CERT_FILE, http_server, https_server + +ts_re = re.compile(r".*\[(?P<ts>.*)\].*") +SPHINX_DOCS_INDEX = path.abspath(path.join(__file__, "..", "roots", "test-linkcheck", "sphinx-docs-index.html")) + + +class DefaultsHandler(http.server.BaseHTTPRequestHandler): + protocol_version = "HTTP/1.1" + + def do_HEAD(self): + if self.path[1:].rstrip() in {"", "anchor.html"}: + self.send_response(200, "OK") + self.send_header("Content-Length", "0") + self.end_headers() + else: + self.send_response(404, "Not Found") + self.send_header("Content-Length", "0") + self.end_headers() + + def do_GET(self): + if self.path[1:].rstrip() == "": + content = b"ok\n\n" + elif self.path[1:].rstrip() == "anchor.html": + doc = '<!DOCTYPE html><html><body><a id="found"></a></body></html>' + content = doc.encode("utf-8") + else: + content = b"" + + if content: + self.send_response(200, "OK") + self.send_header("Content-Length", str(len(content))) + self.end_headers() + self.wfile.write(content) + else: + self.send_response(404, "Not Found") + self.send_header("Content-Length", "0") + self.end_headers() + + +class ConnectionMeasurement: + """Measure the number of distinct host connections created during linkchecking""" + + def __init__(self): + self.connections = set() + self.urllib3_connection_from_url = PoolManager.connection_from_url + self.patcher = mock.patch.object( + target=PoolManager, + attribute='connection_from_url', + new=self._collect_connections(), + ) + + def _collect_connections(self): + def connection_collector(obj, url): + connection = self.urllib3_connection_from_url(obj, url) + self.connections.add(connection) + return connection + return connection_collector + + def __enter__(self): + self.patcher.start() + return self + + def __exit__(self, *args, **kwargs): + for connection in self.connections: + connection.close() + self.patcher.stop() + + @property + def connection_count(self): + return len(self.connections) + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck', freshenv=True) +def test_defaults(app): + with http_server(DefaultsHandler): + with ConnectionMeasurement() as m: + app.build() + assert m.connection_count <= 5 + + # Text output + assert (app.outdir / 'output.txt').exists() + content = (app.outdir / 'output.txt').read_text(encoding='utf8') + + # looking for '#top' and '#does-not-exist' not found should fail + assert "Anchor 'top' not found" in content + assert "Anchor 
'does-not-exist' not found" in content + # images should fail + assert "Not Found for url: http://localhost:7777/image.png" in content + assert "Not Found for url: http://localhost:7777/image2.png" in content + # looking for local file should fail + assert "[broken] path/to/notfound" in content + assert len(content.splitlines()) == 5 + + # JSON output + assert (app.outdir / 'output.json').exists() + content = (app.outdir / 'output.json').read_text(encoding='utf8') + + rows = [json.loads(x) for x in content.splitlines()] + row = rows[0] + for attr in ("filename", "lineno", "status", "code", "uri", "info"): + assert attr in row + + assert len(content.splitlines()) == 10 + assert len(rows) == 10 + # the output order of the rows is not stable + # due to possible variance in network latency + rowsby = {row["uri"]: row for row in rows} + assert rowsby["http://localhost:7777#!bar"] == { + 'filename': 'links.rst', + 'lineno': 5, + 'status': 'working', + 'code': 0, + 'uri': 'http://localhost:7777#!bar', + 'info': '', + } + assert rowsby['http://localhost:7777/image2.png'] == { + 'filename': 'links.rst', + 'lineno': 13, + 'status': 'broken', + 'code': 0, + 'uri': 'http://localhost:7777/image2.png', + 'info': '404 Client Error: Not Found for url: http://localhost:7777/image2.png', + } + # looking for '#top' and '#does-not-exist' not found should fail + assert rowsby["http://localhost:7777/#top"]["info"] == "Anchor 'top' not found" + assert rowsby["http://localhost:7777/#top"]["status"] == "broken" + assert rowsby["http://localhost:7777#does-not-exist"]["info"] == "Anchor 'does-not-exist' not found" + # images should fail + assert "Not Found for url: http://localhost:7777/image.png" in rowsby["http://localhost:7777/image.png"]["info"] + # anchor should be found + assert rowsby['http://localhost:7777/anchor.html#found'] == { + 'filename': 'links.rst', + 'lineno': 14, + 'status': 'working', + 'code': 0, + 'uri': 'http://localhost:7777/anchor.html#found', + 'info': '', + } + + +@pytest.mark.sphinx( + 'linkcheck', testroot='linkcheck', freshenv=True, + confoverrides={'linkcheck_anchors': False}) +def test_check_link_response_only(app): + with http_server(DefaultsHandler): + app.build() + + # JSON output + assert (app.outdir / 'output.json').exists() + content = (app.outdir / 'output.json').read_text(encoding='utf8') + + rows = [json.loads(x) for x in content.splitlines()] + rowsby = {row["uri"]: row for row in rows} + assert rowsby["http://localhost:7777/#top"]["status"] == "working" + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-too-many-retries', freshenv=True) +def test_too_many_retries(app): + with http_server(DefaultsHandler): + app.build() + + # Text output + assert (app.outdir / 'output.txt').exists() + content = (app.outdir / 'output.txt').read_text(encoding='utf8') + + # looking for non-existent URL should fail + assert " Max retries exceeded with url: /doesnotexist" in content + + # JSON output + assert (app.outdir / 'output.json').exists() + content = (app.outdir / 'output.json').read_text(encoding='utf8') + + assert len(content.splitlines()) == 1 + row = json.loads(content) + # the output order of the rows is not stable + # due to possible variance in network latency + + # looking for non-existent URL should fail + assert row['filename'] == 'index.rst' + assert row['lineno'] == 1 + assert row['status'] == 'broken' + assert row['code'] == 0 + assert row['uri'] == 'https://localhost:7777/doesnotexist' + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-raw-node', freshenv=True) 
+def test_raw_node(app): + with http_server(OKHandler): + app.build() + + # JSON output + assert (app.outdir / 'output.json').exists() + content = (app.outdir / 'output.json').read_text(encoding='utf8') + + assert len(content.splitlines()) == 1 + row = json.loads(content) + + # raw nodes' url should be checked too + assert row == { + 'filename': 'index.rst', + 'lineno': 1, + 'status': 'working', + 'code': 0, + 'uri': 'http://localhost:7777/', + 'info': '', + } + + +@pytest.mark.sphinx( + 'linkcheck', testroot='linkcheck-anchors-ignore', freshenv=True, + confoverrides={'linkcheck_anchors_ignore': ["^!", "^top$"]}) +def test_anchors_ignored(app): + with http_server(OKHandler): + app.build() + + assert (app.outdir / 'output.txt').exists() + content = (app.outdir / 'output.txt').read_text(encoding='utf8') + + # expect all ok when excluding #top + assert not content + + +class AnchorsIgnoreForUrlHandler(http.server.BaseHTTPRequestHandler): + def do_HEAD(self): + if self.path in {'/valid', '/ignored'}: + self.send_response(200, "OK") + else: + self.send_response(404, "Not Found") + self.end_headers() + + def do_GET(self): + self.do_HEAD() + if self.path == '/valid': + self.wfile.write(b"<h1 id='valid-anchor'>valid anchor</h1>\n") + elif self.path == '/ignored': + self.wfile.write(b"no anchor but page exists\n") + + +@pytest.mark.sphinx( + 'linkcheck', testroot='linkcheck-anchors-ignore-for-url', freshenv=True, + confoverrides={'linkcheck_anchors_ignore_for_url': [ + 'http://localhost:7777/ignored', # existing page + 'http://localhost:7777/invalid', # unknown page + ]}) +def test_anchors_ignored_for_url(app): + with http_server(AnchorsIgnoreForUrlHandler): + app.build() + + assert (app.outdir / 'output.txt').exists() + content = (app.outdir / 'output.json').read_text(encoding='utf8') + + attrs = ('filename', 'lineno', 'status', 'code', 'uri', 'info') + data = [json.loads(x) for x in content.splitlines()] + assert len(data) == 7 + assert all(all(attr in row for attr in attrs) for row in data) + + # rows may be unsorted due to network latency or + # the order the threads are processing the links + rows = {r['uri']: {'status': r['status'], 'info': r['info']} for r in data} + + assert rows['http://localhost:7777/valid']['status'] == 'working' + assert rows['http://localhost:7777/valid#valid-anchor']['status'] == 'working' + assert rows['http://localhost:7777/valid#invalid-anchor'] == { + 'status': 'broken', + 'info': "Anchor 'invalid-anchor' not found", + } + + assert rows['http://localhost:7777/ignored']['status'] == 'working' + assert rows['http://localhost:7777/ignored#invalid-anchor']['status'] == 'working' + + assert rows['http://localhost:7777/invalid'] == { + 'status': 'broken', + 'info': '404 Client Error: Not Found for url: http://localhost:7777/invalid', + } + assert rows['http://localhost:7777/invalid#anchor'] == { + 'status': 'broken', + 'info': '404 Client Error: Not Found for url: http://localhost:7777/invalid', + } + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-anchor', freshenv=True) +def test_raises_for_invalid_status(app): + class InternalServerErrorHandler(http.server.BaseHTTPRequestHandler): + protocol_version = "HTTP/1.1" + + def do_GET(self): + self.send_error(500, "Internal Server Error") + + with http_server(InternalServerErrorHandler): + app.build() + content = (app.outdir / 'output.txt').read_text(encoding='utf8') + assert content == ( + "index.rst:1: [broken] http://localhost:7777/#anchor: " + "500 Server Error: Internal Server Error " + "for url: 
http://localhost:7777/\n" + ) + + +def custom_handler(valid_credentials=(), success_criteria=lambda _: True): + """ + Returns an HTTP request handler that authenticates the client and then determines + an appropriate HTTP response code, based on caller-provided credentials and optional + success criteria, respectively. + """ + expected_token = None + if valid_credentials: + assert len(valid_credentials) == 2, "expected a pair of strings as credentials" + expected_token = b64encode(":".join(valid_credentials).encode()).decode("utf-8") + del valid_credentials + + class CustomHandler(http.server.BaseHTTPRequestHandler): + protocol_version = "HTTP/1.1" + + def authenticated(method): + def method_if_authenticated(self): + if (expected_token is None + or self.headers["Authorization"] == f"Basic {expected_token}"): + return method(self) + else: + self.send_response(403, "Forbidden") + self.send_header("Content-Length", "0") + self.end_headers() + + return method_if_authenticated + + @authenticated + def do_HEAD(self): + self.do_GET() + + @authenticated + def do_GET(self): + if success_criteria(self): + self.send_response(200, "OK") + self.send_header("Content-Length", "0") + else: + self.send_response(400, "Bad Request") + self.send_header("Content-Length", "0") + self.end_headers() + + return CustomHandler + + +@pytest.mark.sphinx( + 'linkcheck', testroot='linkcheck-localserver', freshenv=True, + confoverrides={'linkcheck_auth': [ + (r'^$', ('no', 'match')), + (r'^http://localhost:7777/$', ('user1', 'password')), + (r'.*local.*', ('user2', 'hunter2')), + ]}) +def test_auth_header_uses_first_match(app): + with http_server(custom_handler(valid_credentials=("user1", "password"))): + app.build() + + with open(app.outdir / "output.json", encoding="utf-8") as fp: + content = json.load(fp) + + assert content["status"] == "working" + + +@pytest.mark.sphinx( + 'linkcheck', testroot='linkcheck-localserver', freshenv=True, + confoverrides={'linkcheck_auth': [(r'^$', ('user1', 'password'))]}) +def test_auth_header_no_match(app): + with http_server(custom_handler(valid_credentials=("user1", "password"))): + app.build() + + with open(app.outdir / "output.json", encoding="utf-8") as fp: + content = json.load(fp) + + # TODO: should this test's webserver return HTTP 401 here? 
+ # https://github.com/sphinx-doc/sphinx/issues/11433 + assert content["info"] == "403 Client Error: Forbidden for url: http://localhost:7777/" + assert content["status"] == "broken" + + +@pytest.mark.sphinx( + 'linkcheck', testroot='linkcheck-localserver', freshenv=True, + confoverrides={'linkcheck_request_headers': { + "http://localhost:7777/": { + "Accept": "text/html", + }, + "*": { + "X-Secret": "open sesami", + }, + }}) +def test_linkcheck_request_headers(app): + def check_headers(self): + if "X-Secret" in self.headers: + return False + if self.headers["Accept"] != "text/html": + return False + return True + + with http_server(custom_handler(success_criteria=check_headers)): + app.build() + + with open(app.outdir / "output.json", encoding="utf-8") as fp: + content = json.load(fp) + + assert content["status"] == "working" + + +@pytest.mark.sphinx( + 'linkcheck', testroot='linkcheck-localserver', freshenv=True, + confoverrides={'linkcheck_request_headers': { + "http://localhost:7777": {"Accept": "application/json"}, + "*": {"X-Secret": "open sesami"}, + }}) +def test_linkcheck_request_headers_no_slash(app): + def check_headers(self): + if "X-Secret" in self.headers: + return False + if self.headers["Accept"] != "application/json": + return False + return True + + with http_server(custom_handler(success_criteria=check_headers)): + app.build() + + with open(app.outdir / "output.json", encoding="utf-8") as fp: + content = json.load(fp) + + assert content["status"] == "working" + + +@pytest.mark.sphinx( + 'linkcheck', testroot='linkcheck-localserver', freshenv=True, + confoverrides={'linkcheck_request_headers': { + "http://do.not.match.org": {"Accept": "application/json"}, + "*": {"X-Secret": "open sesami"}, + }}) +def test_linkcheck_request_headers_default(app): + def check_headers(self): + if self.headers["X-Secret"] != "open sesami": + return False + if self.headers["Accept"] == "application/json": + return False + return True + + with http_server(custom_handler(success_criteria=check_headers)): + app.build() + + with open(app.outdir / "output.json", encoding="utf-8") as fp: + content = json.load(fp) + + assert content["status"] == "working" + + +def make_redirect_handler(*, support_head): + class RedirectOnceHandler(http.server.BaseHTTPRequestHandler): + protocol_version = "HTTP/1.1" + + def do_HEAD(self): + if support_head: + self.do_GET() + else: + self.send_response(405, "Method Not Allowed") + self.send_header("Content-Length", "0") + self.end_headers() + + def do_GET(self): + if self.path == "/?redirected=1": + self.send_response(204, "No content") + else: + self.send_response(302, "Found") + self.send_header("Location", "http://localhost:7777/?redirected=1") + self.send_header("Content-Length", "0") + self.end_headers() + + def log_date_time_string(self): + """Strip date and time from logged messages for assertions.""" + return "" + + return RedirectOnceHandler + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True) +def test_follows_redirects_on_HEAD(app, capsys, warning): + with http_server(make_redirect_handler(support_head=True)): + app.build() + stdout, stderr = capsys.readouterr() + content = (app.outdir / 'output.txt').read_text(encoding='utf8') + assert content == ( + "index.rst:1: [redirected with Found] " + "http://localhost:7777/ to http://localhost:7777/?redirected=1\n" + ) + assert stderr == textwrap.dedent( + """\ + 127.0.0.1 - - [] "HEAD / HTTP/1.1" 302 - + 127.0.0.1 - - [] "HEAD /?redirected=1 HTTP/1.1" 204 - + """, + ) + assert 
warning.getvalue() == '' + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True) +def test_follows_redirects_on_GET(app, capsys, warning): + with http_server(make_redirect_handler(support_head=False)): + app.build() + stdout, stderr = capsys.readouterr() + content = (app.outdir / 'output.txt').read_text(encoding='utf8') + assert content == ( + "index.rst:1: [redirected with Found] " + "http://localhost:7777/ to http://localhost:7777/?redirected=1\n" + ) + assert stderr == textwrap.dedent( + """\ + 127.0.0.1 - - [] "HEAD / HTTP/1.1" 405 - + 127.0.0.1 - - [] "GET / HTTP/1.1" 302 - + 127.0.0.1 - - [] "GET /?redirected=1 HTTP/1.1" 204 - + """, + ) + assert warning.getvalue() == '' + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-warn-redirects', + freshenv=True, confoverrides={ + 'linkcheck_allowed_redirects': {'http://localhost:7777/.*1': '.*'}, + }) +def test_linkcheck_allowed_redirects(app, warning): + with http_server(make_redirect_handler(support_head=False)): + app.build() + + with open(app.outdir / 'output.json', encoding='utf-8') as fp: + rows = [json.loads(l) for l in fp.readlines()] + + assert len(rows) == 2 + records = {row["uri"]: row for row in rows} + assert records["http://localhost:7777/path1"]["status"] == "working" + assert records["http://localhost:7777/path2"] == { + 'filename': 'index.rst', + 'lineno': 3, + 'status': 'redirected', + 'code': 302, + 'uri': 'http://localhost:7777/path2', + 'info': 'http://localhost:7777/?redirected=1', + } + + assert ("index.rst:3: WARNING: redirect http://localhost:7777/path2 - with Found to " + "http://localhost:7777/?redirected=1\n" in strip_escseq(warning.getvalue())) + assert len(warning.getvalue().splitlines()) == 1 + + +class OKHandler(http.server.BaseHTTPRequestHandler): + protocol_version = "HTTP/1.1" + + def do_HEAD(self): + self.send_response(200, "OK") + self.send_header("Content-Length", "0") + self.end_headers() + + def do_GET(self): + content = b"ok\n" + self.send_response(200, "OK") + self.send_header("Content-Length", str(len(content))) + self.end_headers() + self.wfile.write(content) + + +@mock.patch("sphinx.builders.linkcheck.requests.get", wraps=requests.get) +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True) +def test_invalid_ssl(get_request, app): + # Link indicates SSL should be used (https) but the server does not handle it. 
+ with http_server(OKHandler): + app.build() + assert not get_request.called + + with open(app.outdir / 'output.json', encoding='utf-8') as fp: + content = json.load(fp) + assert content["status"] == "broken" + assert content["filename"] == "index.rst" + assert content["lineno"] == 1 + assert content["uri"] == "https://localhost:7777/" + assert "SSLError" in content["info"] + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True) +def test_connect_to_selfsigned_fails(app): + with https_server(OKHandler): + app.build() + + with open(app.outdir / 'output.json', encoding='utf-8') as fp: + content = json.load(fp) + assert content["status"] == "broken" + assert content["filename"] == "index.rst" + assert content["lineno"] == 1 + assert content["uri"] == "https://localhost:7777/" + assert "[SSL: CERTIFICATE_VERIFY_FAILED]" in content["info"] + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True) +def test_connect_to_selfsigned_with_tls_verify_false(app): + app.config.tls_verify = False + with https_server(OKHandler): + app.build() + + with open(app.outdir / 'output.json', encoding='utf-8') as fp: + content = json.load(fp) + assert content == { + "code": 0, + "status": "working", + "filename": "index.rst", + "lineno": 1, + "uri": "https://localhost:7777/", + "info": "", + } + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True) +def test_connect_to_selfsigned_with_tls_cacerts(app): + app.config.tls_cacerts = CERT_FILE + with https_server(OKHandler): + app.build() + + with open(app.outdir / 'output.json', encoding='utf-8') as fp: + content = json.load(fp) + assert content == { + "code": 0, + "status": "working", + "filename": "index.rst", + "lineno": 1, + "uri": "https://localhost:7777/", + "info": "", + } + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True) +def test_connect_to_selfsigned_with_requests_env_var(monkeypatch, app): + monkeypatch.setenv("REQUESTS_CA_BUNDLE", CERT_FILE) + with https_server(OKHandler): + app.build() + + with open(app.outdir / 'output.json', encoding='utf-8') as fp: + content = json.load(fp) + assert content == { + "code": 0, + "status": "working", + "filename": "index.rst", + "lineno": 1, + "uri": "https://localhost:7777/", + "info": "", + } + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True) +def test_connect_to_selfsigned_nonexistent_cert_file(app): + app.config.tls_cacerts = "does/not/exist" + with https_server(OKHandler): + app.build() + + with open(app.outdir / 'output.json', encoding='utf-8') as fp: + content = json.load(fp) + assert content == { + "code": 0, + "status": "broken", + "filename": "index.rst", + "lineno": 1, + "uri": "https://localhost:7777/", + "info": "Could not find a suitable TLS CA certificate bundle, invalid path: does/not/exist", + } + + +class InfiniteRedirectOnHeadHandler(http.server.BaseHTTPRequestHandler): + protocol_version = "HTTP/1.1" + + def do_HEAD(self): + self.send_response(302, "Found") + self.send_header("Location", "http://localhost:7777/") + self.send_header("Content-Length", "0") + self.end_headers() + + def do_GET(self): + content = b"ok\n" + self.send_response(200, "OK") + self.send_header("Content-Length", str(len(content))) + self.end_headers() + self.wfile.write(content) + self.close_connection = True # we don't expect the client to read this response body + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', 
freshenv=True) +def test_TooManyRedirects_on_HEAD(app, monkeypatch): + import requests.sessions + + monkeypatch.setattr(requests.sessions, "DEFAULT_REDIRECT_LIMIT", 5) + + with http_server(InfiniteRedirectOnHeadHandler): + app.build() + + with open(app.outdir / 'output.json', encoding='utf-8') as fp: + content = json.load(fp) + assert content == { + "code": 0, + "status": "working", + "filename": "index.rst", + "lineno": 1, + "uri": "http://localhost:7777/", + "info": "", + } + + +def make_retry_after_handler(responses): + class RetryAfterHandler(http.server.BaseHTTPRequestHandler): + protocol_version = "HTTP/1.1" + + def do_HEAD(self): + status, retry_after = responses.pop(0) + self.send_response(status) + if retry_after: + self.send_header('Retry-After', retry_after) + self.send_header("Content-Length", "0") + self.end_headers() + + def log_date_time_string(self): + """Strip date and time from logged messages for assertions.""" + return "" + + return RetryAfterHandler + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True) +def test_too_many_requests_retry_after_int_delay(app, capsys, status): + with http_server(make_retry_after_handler([(429, "0"), (200, None)])), \ + mock.patch("sphinx.builders.linkcheck.DEFAULT_DELAY", 0), \ + mock.patch("sphinx.builders.linkcheck.QUEUE_POLL_SECS", 0.01): + app.build() + content = (app.outdir / 'output.json').read_text(encoding='utf8') + assert json.loads(content) == { + "filename": "index.rst", + "lineno": 1, + "status": "working", + "code": 0, + "uri": "http://localhost:7777/", + "info": "", + } + rate_limit_log = "-rate limited- http://localhost:7777/ | sleeping...\n" + assert rate_limit_log in strip_colors(status.getvalue()) + _stdout, stderr = capsys.readouterr() + assert stderr == textwrap.dedent( + """\ + 127.0.0.1 - - [] "HEAD / HTTP/1.1" 429 - + 127.0.0.1 - - [] "HEAD / HTTP/1.1" 200 - + """, + ) + + +@pytest.mark.parametrize('tz', [None, 'GMT', 'GMT+3', 'GMT-3']) +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True) +def test_too_many_requests_retry_after_HTTP_date(tz, app, monkeypatch, capsys): + retry_after = wsgiref.handlers.format_date_time(time.time()) + + with monkeypatch.context() as m: + if tz is not None: + m.setenv('TZ', tz) + if sys.platform != "win32": + time.tzset() + m.setattr(sphinx.util.http_date, '_GMT_OFFSET', + float(time.localtime().tm_gmtoff)) + + with http_server(make_retry_after_handler([(429, retry_after), (200, None)])): + app.build() + + content = (app.outdir / 'output.json').read_text(encoding='utf8') + assert json.loads(content) == { + "filename": "index.rst", + "lineno": 1, + "status": "working", + "code": 0, + "uri": "http://localhost:7777/", + "info": "", + } + _stdout, stderr = capsys.readouterr() + assert stderr == textwrap.dedent( + """\ + 127.0.0.1 - - [] "HEAD / HTTP/1.1" 429 - + 127.0.0.1 - - [] "HEAD / HTTP/1.1" 200 - + """, + ) + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True) +def test_too_many_requests_retry_after_without_header(app, capsys): + with http_server(make_retry_after_handler([(429, None), (200, None)])), \ + mock.patch("sphinx.builders.linkcheck.DEFAULT_DELAY", 0): + app.build() + content = (app.outdir / 'output.json').read_text(encoding='utf8') + assert json.loads(content) == { + "filename": "index.rst", + "lineno": 1, + "status": "working", + "code": 0, + "uri": "http://localhost:7777/", + "info": "", + } + _stdout, stderr = capsys.readouterr() + assert stderr == textwrap.dedent( + """\ + 
127.0.0.1 - - [] "HEAD / HTTP/1.1" 429 - + 127.0.0.1 - - [] "HEAD / HTTP/1.1" 200 - + """, + ) + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True) +def test_too_many_requests_user_timeout(app): + app.config.linkcheck_rate_limit_timeout = 0.0 + with http_server(make_retry_after_handler([(429, None)])): + app.build() + content = (app.outdir / 'output.json').read_text(encoding='utf8') + assert json.loads(content) == { + "filename": "index.rst", + "lineno": 1, + "status": "broken", + "code": 0, + "uri": "http://localhost:7777/", + "info": "429 Client Error: Too Many Requests for url: http://localhost:7777/", + } + + +class FakeResponse: + headers: dict[str, str] = {} + url = "http://localhost/" + + +def test_limit_rate_default_sleep(app): + worker = HyperlinkAvailabilityCheckWorker(app.config, Queue(), Queue(), {}) + with mock.patch('time.time', return_value=0.0): + next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After")) + assert next_check == 60.0 + + +def test_limit_rate_user_max_delay(app): + app.config.linkcheck_rate_limit_timeout = 0.0 + worker = HyperlinkAvailabilityCheckWorker(app.config, Queue(), Queue(), {}) + next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After")) + assert next_check is None + + +def test_limit_rate_doubles_previous_wait_time(app): + rate_limits = {"localhost": RateLimit(60.0, 0.0)} + worker = HyperlinkAvailabilityCheckWorker(app.config, Queue(), Queue(), rate_limits) + with mock.patch('time.time', return_value=0.0): + next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After")) + assert next_check == 120.0 + + +def test_limit_rate_clips_wait_time_to_max_time(app): + app.config.linkcheck_rate_limit_timeout = 90.0 + rate_limits = {"localhost": RateLimit(60.0, 0.0)} + worker = HyperlinkAvailabilityCheckWorker(app.config, Queue(), Queue(), rate_limits) + with mock.patch('time.time', return_value=0.0): + next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After")) + assert next_check == 90.0 + + +def test_limit_rate_bails_out_after_waiting_max_time(app): + app.config.linkcheck_rate_limit_timeout = 90.0 + rate_limits = {"localhost": RateLimit(90.0, 0.0)} + worker = HyperlinkAvailabilityCheckWorker(app.config, Queue(), Queue(), rate_limits) + next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After")) + assert next_check is None + + +@mock.patch('sphinx.util.requests.requests.Session.get_adapter') +def test_connection_contention(get_adapter, app, capsys): + # Create a shared, but limited-size, connection pool + import requests + get_adapter.return_value = requests.adapters.HTTPAdapter(pool_maxsize=1) + + # Set an upper-bound on socket timeouts globally + import socket + socket.setdefaulttimeout(5) + + # Place a workload into the linkcheck queue + link_count = 10 + rqueue, wqueue = Queue(), Queue() + for _ in range(link_count): + wqueue.put(CheckRequest(0, Hyperlink("http://localhost:7777", "test", "test.rst", 1))) + + # Create parallel consumer threads + with http_server(make_redirect_handler(support_head=True)): + begin, checked = time.time(), [] + threads = [ + HyperlinkAvailabilityCheckWorker( + config=app.config, + rqueue=rqueue, + wqueue=wqueue, + rate_limits={}, + ) + for _ in range(10) + ] + for thread in threads: + thread.start() + while time.time() < begin + 5 and len(checked) < link_count: + checked.append(rqueue.get(timeout=5)) + for thread in threads: + 
thread.join(timeout=0) + + # Ensure that all items were consumed within the time limit + _, stderr = capsys.readouterr() + assert len(checked) == link_count + assert "TimeoutError" not in stderr + + +class ConnectionResetHandler(http.server.BaseHTTPRequestHandler): + protocol_version = "HTTP/1.1" + + def do_HEAD(self): + self.close_connection = True + + def do_GET(self): + self.send_response(200, "OK") + self.send_header("Content-Length", "0") + self.end_headers() + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True) +def test_get_after_head_raises_connection_error(app): + with http_server(ConnectionResetHandler): + app.build() + content = (app.outdir / 'output.txt').read_text(encoding='utf8') + assert not content + content = (app.outdir / 'output.json').read_text(encoding='utf8') + assert json.loads(content) == { + "filename": "index.rst", + "lineno": 1, + "status": "working", + "code": 0, + "uri": "http://localhost:7777/", + "info": "", + } + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-documents_exclude', freshenv=True) +def test_linkcheck_exclude_documents(app): + with http_server(DefaultsHandler): + app.build() + + with open(app.outdir / 'output.json', encoding='utf-8') as fp: + content = [json.loads(record) for record in fp] + + assert content == [ + { + 'filename': 'broken_link.rst', + 'lineno': 4, + 'status': 'ignored', + 'code': 0, + 'uri': 'https://www.sphinx-doc.org/this-is-a-broken-link', + 'info': 'broken_link matched ^broken_link$ from linkcheck_exclude_documents', + }, + { + 'filename': 'br0ken_link.rst', + 'lineno': 4, + 'status': 'ignored', + 'code': 0, + 'uri': 'https://www.sphinx-doc.org/this-is-another-broken-link', + 'info': 'br0ken_link matched br[0-9]ken_link from linkcheck_exclude_documents', + }, + ] diff --git a/tests/test_build_manpage.py b/tests/test_build_manpage.py new file mode 100644 index 0000000..e765644 --- /dev/null +++ b/tests/test_build_manpage.py @@ -0,0 +1,105 @@ +"""Test the build process with manpage builder with the test root.""" + +import docutils +import pytest + +from sphinx.builders.manpage import default_man_pages +from sphinx.config import Config + + +@pytest.mark.sphinx('man') +def test_all(app, status, warning): + app.builder.build_all() + assert (app.outdir / 'sphinxtests.1').exists() + + content = (app.outdir / 'sphinxtests.1').read_text(encoding='utf8') + assert r'\fBprint \fP\fIi\fP\fB\en\fP' in content + assert r'\fBmanpage\en\fP' in content + + # heading (title + description) + assert r'sphinxtests \- Sphinx <Tests> 0.6alpha1' in content + + # term of definition list including nodes.strong + assert '\n.B term1\n' in content + assert '\nterm2 (\\fBstronged partially\\fP)\n' in content + + # test samp with braces + assert '\n\\fIvariable_only\\fP\n' in content + assert '\n\\fIvariable\\fP\\fB and text\\fP\n' in content + assert '\n\\fBShow \\fP\\fIvariable\\fP\\fB in the middle\\fP\n' in content + + assert 'Footnotes' not in content + + +@pytest.mark.sphinx('man', testroot='basic', + confoverrides={'man_pages': [('index', 'title', None, [], 1)]}) +def test_man_pages_empty_description(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'title.1').read_text(encoding='utf8') + assert r'title \-' not in content + + +@pytest.mark.sphinx('man', testroot='basic', + confoverrides={'man_make_section_directory': True}) +def test_man_make_section_directory(app, status, warning): + app.build() + assert (app.outdir / 'man1' / 'python.1').exists() + + +@pytest.mark.sphinx('man', 
testroot='directive-code') +def test_captioned_code_block(app, status, warning): + app.builder.build_all() + content = (app.outdir / 'python.1').read_text(encoding='utf8') + + if docutils.__version_info__[:2] < (0, 21): + expected = """\ +.sp +caption \\fItest\\fP rb +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +def ruby? + false +end +.ft P +.fi +.UNINDENT +.UNINDENT +""" + else: + expected = """\ +.sp +caption \\fItest\\fP rb +.INDENT 0.0 +.INDENT 3.5 +.sp +.EX +def ruby? + false +end +.EE +.UNINDENT +.UNINDENT +""" + + assert expected in content + + +def test_default_man_pages(): + config = Config({'project': 'STASI™ Documentation', + 'author': "Wolfgang Schäuble & G'Beckstein", + 'release': '1.0'}) + config.init_values() + expected = [('index', 'stasi', 'STASI™ Documentation 1.0', + ["Wolfgang Schäuble & G'Beckstein"], 1)] + assert default_man_pages(config) == expected + + +@pytest.mark.sphinx('man', testroot='markup-rubric') +def test_rubric(app, status, warning): + app.build() + content = (app.outdir / 'python.1').read_text(encoding='utf8') + assert 'This is a rubric\n' in content diff --git a/tests/test_build_texinfo.py b/tests/test_build_texinfo.py new file mode 100644 index 0000000..9964382 --- /dev/null +++ b/tests/test_build_texinfo.py @@ -0,0 +1,155 @@ +"""Test the build process with Texinfo builder with the test root.""" + +import os +import re +import subprocess +from pathlib import Path +from subprocess import CalledProcessError +from unittest.mock import Mock + +import pytest + +from sphinx.builders.texinfo import default_texinfo_documents +from sphinx.config import Config +from sphinx.testing.util import strip_escseq +from sphinx.util.docutils import new_document +from sphinx.writers.texinfo import TexinfoTranslator + +from .test_build_html import ENV_WARNINGS + +TEXINFO_WARNINGS = ENV_WARNINGS + """\ +%(root)s/index.rst:\\d+: WARNING: unknown option: '&option' +%(root)s/index.rst:\\d+: WARNING: citation not found: missing +%(root)s/index.rst:\\d+: WARNING: a suitable image for texinfo builder not found: foo.\\* +%(root)s/index.rst:\\d+: WARNING: a suitable image for texinfo builder not found: \ +\\['application/pdf', 'image/svg\\+xml'\\] \\(svgimg.\\*\\) +""" + + +@pytest.mark.sphinx('texinfo', testroot='warnings', freshenv=True) +def test_texinfo_warnings(app, status, warning): + app.builder.build_all() + warnings = strip_escseq(re.sub(re.escape(os.sep) + '{1,2}', '/', warning.getvalue())) + warnings_exp = TEXINFO_WARNINGS % { + 'root': re.escape(app.srcdir.as_posix())} + assert re.match(warnings_exp + '$', warnings), \ + "Warnings don't match:\n" + \ + '--- Expected (regex):\n' + warnings_exp + \ + '--- Got:\n' + warnings + + +@pytest.mark.sphinx('texinfo') +def test_texinfo(app, status, warning): + TexinfoTranslator.ignore_missing_images = True + app.builder.build_all() + result = (app.outdir / 'sphinxtests.texi').read_text(encoding='utf8') + assert ('@anchor{markup doc}@anchor{11}' + '@anchor{markup id1}@anchor{12}' + '@anchor{markup testing-various-markup}@anchor{13}' in result) + assert 'Footnotes' not in result + # now, try to run makeinfo over it + try: + args = ['makeinfo', '--no-split', 'sphinxtests.texi'] + subprocess.run(args, capture_output=True, cwd=app.outdir, check=True) + except OSError as exc: + raise pytest.skip.Exception from exc # most likely makeinfo was not found + except CalledProcessError as exc: + print(exc.stdout) + print(exc.stderr) + msg = f'makeinfo exited with return code {exc.retcode}' + raise AssertionError(msg) from exc + + 
+@pytest.mark.sphinx('texinfo', testroot='markup-rubric') +def test_texinfo_rubric(app, status, warning): + app.build() + + output = (app.outdir / 'python.texi').read_text(encoding='utf8') + assert '@heading This is a rubric' in output + assert '@heading This is a multiline rubric' in output + + +@pytest.mark.sphinx('texinfo', testroot='markup-citation') +def test_texinfo_citation(app, status, warning): + app.builder.build_all() + + output = (app.outdir / 'python.texi').read_text(encoding='utf8') + assert 'This is a citation ref; @ref{1,,[CITE1]} and @ref{2,,[CITE2]}.' in output + assert ('@anchor{index cite1}@anchor{1}@w{(CITE1)} \n' + 'This is a citation\n') in output + assert ('@anchor{index cite2}@anchor{2}@w{(CITE2)} \n' + 'This is a multiline citation\n') in output + + +def test_default_texinfo_documents(): + config = Config({'project': 'STASI™ Documentation', + 'author': "Wolfgang Schäuble & G'Beckstein"}) + config.init_values() + expected = [('index', 'stasi', 'STASI™ Documentation', + "Wolfgang Schäuble & G'Beckstein", 'stasi', + 'One line description of project', 'Miscellaneous')] + assert default_texinfo_documents(config) == expected + + +@pytest.mark.sphinx('texinfo') +def test_texinfo_escape_id(app, status, warning): + settings = Mock(title='', + texinfo_dir_entry='', + texinfo_elements={}) + document = new_document('', settings) + translator = app.builder.create_translator(document, app.builder) + + assert translator.escape_id('Hello world') == 'Hello world' + assert translator.escape_id('Hello world') == 'Hello world' + assert translator.escape_id('Hello Sphinx world') == 'Hello Sphinx world' + assert translator.escape_id('Hello:world') == 'Hello world' + assert translator.escape_id('Hello(world)') == 'Hello world' + assert translator.escape_id('Hello world.') == 'Hello world' + assert translator.escape_id('.') == '.' 
+ + +@pytest.mark.sphinx('texinfo', testroot='footnotes') +def test_texinfo_footnote(app, status, warning): + app.builder.build_all() + + output = (app.outdir / 'python.texi').read_text(encoding='utf8') + assert 'First footnote: @footnote{\nFirst\n}' in output + + +@pytest.mark.sphinx('texinfo') +def test_texinfo_xrefs(app, status, warning): + app.builder.build_all() + output = (app.outdir / 'sphinxtests.texi').read_text(encoding='utf8') + assert re.search(r'@ref{\w+,,--plugin\.option}', output) + + # Now rebuild it without xrefs + app.config.texinfo_cross_references = False + app.builder.build_all() + output = (app.outdir / 'sphinxtests.texi').read_text(encoding='utf8') + assert not re.search(r'@ref{\w+,,--plugin\.option}', output) + assert 'Link to perl +p, --ObjC++, --plugin.option, create-auth-token, arg and -j' in output + + +@pytest.mark.sphinx('texinfo', testroot='root') +def test_texinfo_samp_with_variable(app, status, warning): + app.build() + + output = (app.outdir / 'sphinxtests.texi').read_text(encoding='utf8') + + assert '@code{@var{variable_only}}' in output + assert '@code{@var{variable} and text}' in output + assert '@code{Show @var{variable} in the middle}' in output + + +@pytest.mark.sphinx('texinfo', testroot='images') +def test_copy_images(app, status, warning): + app.build() + + images_dir = Path(app.outdir) / 'python-figures' + images = {image.name for image in images_dir.rglob('*')} + images.discard('python-logo.png') + assert images == { + 'img.png', + 'rimg.png', + 'testimäge.png', + } diff --git a/tests/test_build_text.py b/tests/test_build_text.py new file mode 100644 index 0000000..4a53be3 --- /dev/null +++ b/tests/test_build_text.py @@ -0,0 +1,278 @@ +"""Test the build process with Text builder with the test root.""" + +import pytest +from docutils.utils import column_width + +from sphinx.writers.text import MAXWIDTH, Cell, Table + + +def with_text_app(*args, **kw): + default_kw = { + 'buildername': 'text', + 'testroot': 'build-text', + } + default_kw.update(kw) + return pytest.mark.sphinx(*args, **default_kw) + + +@with_text_app() +def test_maxwitdh_with_prefix(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'maxwidth.txt').read_text(encoding='utf8') + + lines = result.splitlines() + line_widths = [column_width(line) for line in lines] + assert max(line_widths) < MAXWIDTH + assert lines[0].startswith('See also:') + assert lines[1].startswith('') + assert lines[2].startswith(' ham') + assert lines[3].startswith(' ham') + assert lines[4] == '' + assert lines[5].startswith('* ham') + assert lines[6].startswith(' ham') + assert lines[7] == '' + assert lines[8].startswith('* ham') + assert lines[9].startswith(' ham') + assert lines[10] == '' + assert lines[11].startswith('spam egg') + + +@with_text_app() +def test_lineblock(app, status, warning): + # regression test for #1109: need empty line after line block + app.builder.build_update() + result = (app.outdir / 'lineblock.txt').read_text(encoding='utf8') + expect = ( + "* one\n" + "\n" + " line-block 1\n" + " line-block 2\n" + "\n" + "followed paragraph.\n" + ) + assert result == expect + + +@with_text_app() +def test_nonascii_title_line(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'nonascii_title.txt').read_text(encoding='utf8') + expect_underline = '*********' + result_underline = result.splitlines()[1].strip() + assert expect_underline == result_underline + + +@with_text_app() +def test_nonascii_table(app, status, warning): + 
app.builder.build_update() + result = (app.outdir / 'nonascii_table.txt').read_text(encoding='utf8') + lines = [line.strip() for line in result.splitlines() if line.strip()] + line_widths = [column_width(line) for line in lines] + assert len(set(line_widths)) == 1 # same widths + + +@with_text_app() +def test_nonascii_maxwidth(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'nonascii_maxwidth.txt').read_text(encoding='utf8') + lines = [line.strip() for line in result.splitlines() if line.strip()] + line_widths = [column_width(line) for line in lines] + assert max(line_widths) < MAXWIDTH + + +def test_table_builder(): + table = Table([6, 6]) + table.add_cell(Cell("foo")) + table.add_cell(Cell("bar")) + table_str = str(table).split("\n") + assert table_str[0] == "+--------+--------+" + assert table_str[1] == "| foo | bar |" + assert table_str[2] == "+--------+--------+" + assert repr(table).count("<Cell ") == 2 + + +def test_table_separator(): + table = Table([6, 6]) + table.add_cell(Cell("foo")) + table.add_cell(Cell("bar")) + table.set_separator() + table.add_row() + table.add_cell(Cell("FOO")) + table.add_cell(Cell("BAR")) + table_str = str(table).split("\n") + assert table_str[0] == "+--------+--------+" + assert table_str[1] == "| foo | bar |" + assert table_str[2] == "|========|========|" + assert table_str[3] == "| FOO | BAR |" + assert table_str[4] == "+--------+--------+" + assert repr(table).count("<Cell ") == 4 + + +def test_table_cell(): + cell = Cell("Foo bar baz") + cell.wrap(3) + assert "Cell" in repr(cell) + assert cell.wrapped == ["Foo", "bar", "baz"] + + +@with_text_app() +def test_table_with_empty_cell(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'table.txt').read_text(encoding='utf8') + lines = [line.strip() for line in result.splitlines() if line.strip()] + assert lines[0] == "+-------+-------+" + assert lines[1] == "| XXX | XXX |" + assert lines[2] == "+-------+-------+" + assert lines[3] == "| | XXX |" + assert lines[4] == "+-------+-------+" + assert lines[5] == "| XXX | |" + assert lines[6] == "+-------+-------+" + + +@with_text_app() +def test_table_with_rowspan(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'table_rowspan.txt').read_text(encoding='utf8') + lines = [line.strip() for line in result.splitlines() if line.strip()] + assert lines[0] == "+-------+-------+" + assert lines[1] == "| XXXXXXXXX |" + assert lines[2] == "+-------+-------+" + assert lines[3] == "| | XXX |" + assert lines[4] == "+-------+-------+" + assert lines[5] == "| XXX | |" + assert lines[6] == "+-------+-------+" + + +@with_text_app() +def test_table_with_colspan(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'table_colspan.txt').read_text(encoding='utf8') + lines = [line.strip() for line in result.splitlines() if line.strip()] + assert lines[0] == "+-------+-------+" + assert lines[1] == "| XXX | XXX |" + assert lines[2] == "+-------+-------+" + assert lines[3] == "| | XXX |" + assert lines[4] == "+-------+ |" + assert lines[5] == "| XXX | |" + assert lines[6] == "+-------+-------+" + + +@with_text_app() +def test_table_with_colspan_left(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'table_colspan_left.txt').read_text(encoding='utf8') + lines = [line.strip() for line in result.splitlines() if line.strip()] + assert lines[0] == "+-------+-------+" + assert lines[1] == "| XXX | XXX |" + assert lines[2] == "+-------+-------+" + assert lines[3] 
== "| XXX | XXX |" + assert lines[4] == "| +-------+" + assert lines[5] == "| | |" + assert lines[6] == "+-------+-------+" + + +@with_text_app() +def test_table_with_colspan_and_rowspan(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'table_colspan_and_rowspan.txt').read_text(encoding='utf8') + lines = [line.strip() for line in result.splitlines() if line.strip()] + assert result + assert lines[0] == "+-------+-------+-------+" + assert lines[1] == "| AAA | BBB |" + assert lines[2] == "+-------+-------+ |" + assert lines[3] == "| DDD | XXX | |" + assert lines[4] == "| +-------+-------+" + assert lines[5] == "| | CCC |" + assert lines[6] == "+-------+-------+-------+" + + +@with_text_app() +def test_list_items_in_admonition(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'listitems.txt').read_text(encoding='utf8') + lines = [line.rstrip() for line in result.splitlines()] + assert lines[0] == "See also:" + assert lines[1] == "" + assert lines[2] == " * item 1" + assert lines[3] == "" + assert lines[4] == " * item 2" + + +@with_text_app() +def test_secnums(app, status, warning): + app.builder.build_all() + index = (app.outdir / 'index.txt').read_text(encoding='utf8') + lines = index.splitlines() + assert lines[0] == "* 1. Section A" + assert lines[1] == "" + assert lines[2] == "* 2. Section B" + assert lines[3] == "" + assert lines[4] == " * 2.1. Sub Ba" + assert lines[5] == "" + assert lines[6] == " * 2.2. Sub Bb" + doc2 = (app.outdir / 'doc2.txt').read_text(encoding='utf8') + expect = ( + "2. Section B\n" + "************\n" + "\n" + "\n" + "2.1. Sub Ba\n" + "===========\n" + "\n" + "\n" + "2.2. Sub Bb\n" + "===========\n" + ) + assert doc2 == expect + + app.config.text_secnumber_suffix = " " + app.builder.build_all() + index = (app.outdir / 'index.txt').read_text(encoding='utf8') + lines = index.splitlines() + assert lines[0] == "* 1 Section A" + assert lines[1] == "" + assert lines[2] == "* 2 Section B" + assert lines[3] == "" + assert lines[4] == " * 2.1 Sub Ba" + assert lines[5] == "" + assert lines[6] == " * 2.2 Sub Bb" + doc2 = (app.outdir / 'doc2.txt').read_text(encoding='utf8') + expect = ( + "2 Section B\n" + "***********\n" + "\n" + "\n" + "2.1 Sub Ba\n" + "==========\n" + "\n" + "\n" + "2.2 Sub Bb\n" + "==========\n" + ) + assert doc2 == expect + + app.config.text_add_secnumbers = False + app.builder.build_all() + index = (app.outdir / 'index.txt').read_text(encoding='utf8') + lines = index.splitlines() + assert lines[0] == "* Section A" + assert lines[1] == "" + assert lines[2] == "* Section B" + assert lines[3] == "" + assert lines[4] == " * Sub Ba" + assert lines[5] == "" + assert lines[6] == " * Sub Bb" + doc2 = (app.outdir / 'doc2.txt').read_text(encoding='utf8') + expect = ( + "Section B\n" + "*********\n" + "\n" + "\n" + "Sub Ba\n" + "======\n" + "\n" + "\n" + "Sub Bb\n" + "======\n" + ) + assert doc2 == expect diff --git a/tests/test_builder.py b/tests/test_builder.py new file mode 100644 index 0000000..1ff8aea --- /dev/null +++ b/tests/test_builder.py @@ -0,0 +1,39 @@ +"""Test the Builder class.""" +import pytest + + +@pytest.mark.sphinx('dummy', srcdir="test_builder", freshenv=True) +def test_incremental_reading(app): + # first reading + updated = app.builder.read() + assert set(updated) == app.env.found_docs == set(app.env.all_docs) + assert updated == sorted(updated) # sorted by alphanumeric + + # test if exclude_patterns works ok + assert 'subdir/excluded' not in app.env.found_docs + + # before second reading, 
add, modify and remove source files + (app.srcdir / 'new.txt').write_text('New file\n========\n', encoding='utf8') + app.env.all_docs['index'] = 0 # mark as modified + (app.srcdir / 'autodoc.txt').unlink() + + # second reading + updated = app.builder.read() + + assert set(updated) == {'index', 'new'} + assert 'autodoc' not in app.env.all_docs + assert 'autodoc' not in app.env.found_docs + + +@pytest.mark.sphinx('dummy', testroot='warnings', freshenv=True) +def test_incremental_reading_for_missing_files(app): + # first reading + updated = app.builder.read() + assert set(updated) == app.env.found_docs == set(app.env.all_docs) + + # second reading + updated = app.builder.read() + + # "index" is listed up to updated because it contains references + # to nonexisting downloadable or image files + assert set(updated) == {'index'} diff --git a/tests/test_catalogs.py b/tests/test_catalogs.py new file mode 100644 index 0000000..b7fd7be --- /dev/null +++ b/tests/test_catalogs.py @@ -0,0 +1,74 @@ +"""Test the base build process.""" +import shutil +from pathlib import Path + +import pytest + + +@pytest.fixture() +def _setup_test(app_params): + assert isinstance(app_params.kwargs['srcdir'], Path) + srcdir = app_params.kwargs['srcdir'] + src_locale_dir = srcdir / 'xx' / 'LC_MESSAGES' + dest_locale_dir = srcdir / 'locale' + # copy all catalogs into locale layout directory + for po in src_locale_dir.rglob('*.po'): + copy_po = (dest_locale_dir / 'en' / 'LC_MESSAGES' / po.relative_to(src_locale_dir)) + if not copy_po.parent.exists(): + copy_po.parent.mkdir(parents=True, exist_ok=True) + shutil.copy(po, copy_po) + + yield + + # delete remnants left over after failed build + shutil.rmtree(dest_locale_dir, ignore_errors=True) + shutil.rmtree(srcdir / '_build', ignore_errors=True) + + +@pytest.mark.usefixtures('_setup_test') +@pytest.mark.test_params(shared_result='test-catalogs') +@pytest.mark.sphinx( + 'html', testroot='intl', + confoverrides={'language': 'en', 'locale_dirs': ['./locale']}) +def test_compile_all_catalogs(app, status, warning): + app.builder.compile_all_catalogs() + + locale_dir = app.srcdir / 'locale' + catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES' + expect = {x.with_suffix('.mo') for x in catalog_dir.rglob('*.po')} + actual = set(catalog_dir.rglob('*.mo')) + assert actual # not empty + assert actual == expect + + +@pytest.mark.usefixtures('_setup_test') +@pytest.mark.test_params(shared_result='test-catalogs') +@pytest.mark.sphinx( + 'html', testroot='intl', + confoverrides={'language': 'en', 'locale_dirs': ['./locale']}) +def test_compile_specific_catalogs(app, status, warning): + locale_dir = app.srcdir / 'locale' + catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES' + + actual_on_boot = set(catalog_dir.rglob('*.mo')) # sphinx.mo might be included + app.builder.compile_specific_catalogs([app.srcdir / 'admonitions.txt']) + actual = {str(x.relative_to(catalog_dir)) + for x in catalog_dir.rglob('*.mo') + if x not in actual_on_boot} + assert actual == {'admonitions.mo'} + + +@pytest.mark.usefixtures('_setup_test') +@pytest.mark.test_params(shared_result='test-catalogs') +@pytest.mark.sphinx( + 'html', testroot='intl', + confoverrides={'language': 'en', 'locale_dirs': ['./locale']}) +def test_compile_update_catalogs(app, status, warning): + app.builder.compile_update_catalogs() + + locale_dir = app.srcdir / 'locale' + catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES' + expect = {x.with_suffix('.mo') for x in set(catalog_dir.rglob('*.po'))} + actual = 
set(catalog_dir.rglob('*.mo')) + assert actual # not empty + assert actual == expect diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 0000000..0be0a58 --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,517 @@ +"""Test the sphinx.config.Config class.""" + +import time +from pathlib import Path +from unittest import mock + +import pytest + +import sphinx +from sphinx.config import ENUM, Config, check_confval_types +from sphinx.errors import ConfigError, ExtensionError, VersionRequirementError + + +@pytest.mark.sphinx(testroot='config', confoverrides={ + 'root_doc': 'root', + 'nonexisting_value': 'True', + 'latex_elements.maketitle': 'blah blah blah', + 'modindex_common_prefix': 'path1,path2'}) +def test_core_config(app, status, warning): + cfg = app.config + + # simple values + assert 'project' in cfg.__dict__ + assert cfg.project == 'Sphinx <Tests>' + assert cfg.templates_path == ['_templates'] + + # overrides + assert cfg.root_doc == 'root' + assert cfg.latex_elements['maketitle'] == 'blah blah blah' + assert cfg.modindex_common_prefix == ['path1', 'path2'] + + # simple default values + assert 'locale_dirs' not in cfg.__dict__ + assert cfg.locale_dirs == ['locales'] + assert cfg.trim_footnote_reference_space is False + + # complex default values + assert 'html_title' not in cfg.__dict__ + assert cfg.html_title == 'Sphinx <Tests> 0.6alpha1 documentation' + + # complex default values mustn't raise + for valuename in cfg.config_values: + getattr(cfg, valuename) + + # "contains" gives True both for set and unset values + assert 'project' in cfg + assert 'html_title' in cfg + assert 'nonexisting_value' not in cfg + + # invalid values + with pytest.raises(AttributeError): + _ = cfg._value + with pytest.raises(AttributeError): + _ = cfg.nonexisting_value + + # non-value attributes are deleted from the namespace + with pytest.raises(AttributeError): + _ = cfg.sys + + # setting attributes + cfg.project = 'Foo' + assert cfg.project == 'Foo' + + # alternative access via item interface + cfg['project'] = 'Sphinx Tests' + assert cfg['project'] == cfg.project == 'Sphinx Tests' + + +def test_config_not_found(tmp_path): + with pytest.raises(ConfigError): + Config.read(tmp_path) + + +def test_extension_values(): + config = Config() + + # check standard settings + assert config.root_doc == 'index' + + # can't override it by add_config_value() + with pytest.raises(ExtensionError) as excinfo: + config.add('root_doc', 'index', 'env', None) + assert 'already present' in str(excinfo.value) + + # add a new config value + config.add('value_from_ext', [], 'env', None) + assert config.value_from_ext == [] + + # can't override it by add_config_value() + with pytest.raises(ExtensionError) as excinfo: + config.add('value_from_ext', [], 'env', None) + assert 'already present' in str(excinfo.value) + + +def test_overrides(): + config = Config({'value1': '1', 'value2': 2, 'value6': {'default': 6}}, + {'value2': 999, 'value3': '999', 'value5.attr1': 999, 'value6.attr1': 999, + 'value7': 'abc,def,ghi', 'value8': 'abc,def,ghi'}) + config.add('value1', None, 'env', ()) + config.add('value2', None, 'env', ()) + config.add('value3', 0, 'env', ()) + config.add('value4', 0, 'env', ()) + config.add('value5', {'default': 0}, 'env', ()) + config.add('value6', {'default': 0}, 'env', ()) + config.add('value7', None, 'env', ()) + config.add('value8', [], 'env', ()) + config.init_values() + + assert config.value1 == '1' + assert config.value2 == 999 + assert config.value3 == 999 + assert config.value4 
== 0 + assert config.value5 == {'attr1': 999} + assert config.value6 == {'default': 6, 'attr1': 999} + assert config.value7 == 'abc,def,ghi' + assert config.value8 == ['abc', 'def', 'ghi'] + + +def test_overrides_boolean(): + config = Config({}, {'value1': '1', + 'value2': '0', + 'value3': '0'}) + config.add('value1', None, 'env', [bool]) + config.add('value2', None, 'env', [bool]) + config.add('value3', True, 'env', ()) + config.init_values() + + assert config.value1 is True + assert config.value2 is False + assert config.value3 is False + + +@mock.patch("sphinx.config.logger") +def test_errors_warnings(logger, tmp_path): + # test the error for syntax errors in the config file + (tmp_path / 'conf.py').write_text('project = \n', encoding='ascii') + with pytest.raises(ConfigError) as excinfo: + Config.read(tmp_path, {}, None) + assert 'conf.py' in str(excinfo.value) + + # test the automatic conversion of 2.x only code in configs + (tmp_path / 'conf.py').write_text('project = u"Jägermeister"\n', encoding='utf8') + cfg = Config.read(tmp_path, {}, None) + cfg.init_values() + assert cfg.project == 'Jägermeister' + assert logger.called is False + + +def test_errors_if_setup_is_not_callable(tmp_path, make_app): + # test the error to call setup() in the config file + (tmp_path / 'conf.py').write_text('setup = 1', encoding='utf8') + with pytest.raises(ConfigError) as excinfo: + make_app(srcdir=tmp_path) + assert 'callable' in str(excinfo.value) + + +@pytest.fixture() +def make_app_with_empty_project(make_app, tmp_path): + (tmp_path / 'conf.py').write_text('', encoding='utf8') + + def _make_app(*args, **kw): + kw.setdefault('srcdir', Path(tmp_path)) + return make_app(*args, **kw) + return _make_app + + +@mock.patch.object(sphinx, '__display_version__', '1.6.4') +def test_needs_sphinx(make_app_with_empty_project): + make_app = make_app_with_empty_project + # micro version + make_app(confoverrides={'needs_sphinx': '1.6.3'}) # OK: less + make_app(confoverrides={'needs_sphinx': '1.6.4'}) # OK: equals + with pytest.raises(VersionRequirementError): + make_app(confoverrides={'needs_sphinx': '1.6.5'}) # NG: greater + + # minor version + make_app(confoverrides={'needs_sphinx': '1.5'}) # OK: less + make_app(confoverrides={'needs_sphinx': '1.6'}) # OK: equals + with pytest.raises(VersionRequirementError): + make_app(confoverrides={'needs_sphinx': '1.7'}) # NG: greater + + # major version + make_app(confoverrides={'needs_sphinx': '0'}) # OK: less + make_app(confoverrides={'needs_sphinx': '1'}) # OK: equals + with pytest.raises(VersionRequirementError): + make_app(confoverrides={'needs_sphinx': '2'}) # NG: greater + + +@mock.patch("sphinx.config.logger") +def test_config_eol(logger, tmp_path): + # test config file's eol patterns: LF, CRLF + configfile = tmp_path / 'conf.py' + for eol in (b'\n', b'\r\n'): + configfile.write_bytes(b'project = "spam"' + eol) + cfg = Config.read(tmp_path, {}, None) + cfg.init_values() + assert cfg.project == 'spam' + assert logger.called is False + + +@pytest.mark.sphinx(confoverrides={'root_doc': 123, + 'language': 'foo', + 'primary_domain': None}) +def test_builtin_conf(app, status, warning): + warnings = warning.getvalue() + assert 'root_doc' in warnings, ( + 'override on builtin "root_doc" should raise a type warning') + assert 'language' not in warnings, ( + 'explicitly permitted override on builtin "language" should NOT raise ' + 'a type warning') + assert 'primary_domain' not in warnings, ( + 'override to None on builtin "primary_domain" should NOT raise a type ' + 'warning') 
+ + +# example classes for type checking +class A: + pass + + +class B(A): + pass + + +class C(A): + pass + + +# name, default, annotation, actual, warned +TYPECHECK_WARNINGS = [ + ('value1', 'string', None, 123, True), # wrong type + ('value2', lambda _: [], None, 123, True), # lambda with wrong type + ('value3', lambda _: [], None, [], False), # lambda with correct type + ('value4', 100, None, True, True), # child type + ('value5', False, None, True, False), # parent type + ('value6', [], None, (), True), # other sequence type + ('value7', 'string', [list], ['foo'], False), # explicit type annotation + ('value8', B(), None, C(), False), # sibling type + ('value9', None, None, 'foo', False), # no default or no annotations + ('value10', None, None, 123, False), # no default or no annotations + ('value11', None, [str], 'bar', False), # str + ('value12', 'string', None, 'bar', False), # str +] + + +@mock.patch("sphinx.config.logger") +@pytest.mark.parametrize(('name', 'default', 'annotation', 'actual', 'warned'), TYPECHECK_WARNINGS) +def test_check_types(logger, name, default, annotation, actual, warned): + config = Config({name: actual}) + config.add(name, default, 'env', annotation or ()) + config.init_values() + check_confval_types(None, config) + assert logger.warning.called == warned + + +TYPECHECK_WARNING_MESSAGES = [ + ('value1', 'string', [str], ['foo', 'bar'], + "The config value `value1' has type `list'; expected `str'."), + ('value1', 'string', [str, int], ['foo', 'bar'], + "The config value `value1' has type `list'; expected `str' or `int'."), + ('value1', 'string', [str, int, tuple], ['foo', 'bar'], + "The config value `value1' has type `list'; expected `str', `int', or `tuple'."), +] + + +@mock.patch("sphinx.config.logger") +@pytest.mark.parametrize(('name', 'default', 'annotation', 'actual', 'message'), TYPECHECK_WARNING_MESSAGES) +def test_conf_warning_message(logger, name, default, annotation, actual, message): + config = Config({name: actual}) + config.add(name, default, False, annotation or ()) + config.init_values() + check_confval_types(None, config) + assert logger.warning.called + assert logger.warning.call_args[0][0] == message + + +@mock.patch("sphinx.config.logger") +def test_check_enum(logger): + config = Config() + config.add('value', 'default', False, ENUM('default', 'one', 'two')) + config.init_values() + check_confval_types(None, config) + logger.warning.assert_not_called() # not warned + + +@mock.patch("sphinx.config.logger") +def test_check_enum_failed(logger): + config = Config({'value': 'invalid'}) + config.add('value', 'default', False, ENUM('default', 'one', 'two')) + config.init_values() + check_confval_types(None, config) + assert logger.warning.called + + +@mock.patch("sphinx.config.logger") +def test_check_enum_for_list(logger): + config = Config({'value': ['one', 'two']}) + config.add('value', 'default', False, ENUM('default', 'one', 'two')) + config.init_values() + check_confval_types(None, config) + logger.warning.assert_not_called() # not warned + + +@mock.patch("sphinx.config.logger") +def test_check_enum_for_list_failed(logger): + config = Config({'value': ['one', 'two', 'invalid']}) + config.add('value', 'default', False, ENUM('default', 'one', 'two')) + config.init_values() + check_confval_types(None, config) + assert logger.warning.called + + +nitpick_warnings = [ + "WARNING: py:const reference target not found: prefix.anything.postfix", + "WARNING: py:class reference target not found: prefix.anything", + "WARNING: py:class reference target 
not found: anything.postfix", + "WARNING: js:class reference target not found: prefix.anything.postfix", +] + + +@pytest.mark.sphinx(testroot='nitpicky-warnings') +def test_nitpick_base(app, status, warning): + app.builder.build_all() + + warning = warning.getvalue().strip().split('\n') + assert len(warning) == len(nitpick_warnings) + for actual, expected in zip(warning, nitpick_warnings): + assert expected in actual + + +@pytest.mark.sphinx(testroot='nitpicky-warnings', confoverrides={ + 'nitpick_ignore': { + ('py:const', 'prefix.anything.postfix'), + ('py:class', 'prefix.anything'), + ('py:class', 'anything.postfix'), + ('js:class', 'prefix.anything.postfix'), + }, +}) +def test_nitpick_ignore(app, status, warning): + app.builder.build_all() + assert not len(warning.getvalue().strip()) + + +@pytest.mark.sphinx(testroot='nitpicky-warnings', confoverrides={ + 'nitpick_ignore_regex': [ + (r'py:.*', r'.*postfix'), + (r'.*:class', r'prefix.*'), + ], +}) +def test_nitpick_ignore_regex1(app, status, warning): + app.builder.build_all() + assert not len(warning.getvalue().strip()) + + +@pytest.mark.sphinx(testroot='nitpicky-warnings', confoverrides={ + 'nitpick_ignore_regex': [ + (r'py:.*', r'prefix.*'), + (r'.*:class', r'.*postfix'), + ], +}) +def test_nitpick_ignore_regex2(app, status, warning): + app.builder.build_all() + assert not len(warning.getvalue().strip()) + + +@pytest.mark.sphinx(testroot='nitpicky-warnings', confoverrides={ + 'nitpick_ignore_regex': [ + # None of these should match + (r'py:', r'.*'), + (r':class', r'.*'), + (r'', r'.*'), + (r'.*', r'anything'), + (r'.*', r'prefix'), + (r'.*', r'postfix'), + (r'.*', r''), + ], +}) +def test_nitpick_ignore_regex_fullmatch(app, status, warning): + app.builder.build_all() + + warning = warning.getvalue().strip().split('\n') + assert len(warning) == len(nitpick_warnings) + for actual, expected in zip(warning, nitpick_warnings): + assert expected in actual + + +def test_conf_py_language_none(tmp_path): + """Regression test for #10474.""" + + # Given a conf.py file with language = None + (tmp_path / 'conf.py').write_text("language = None", encoding='utf-8') + + # When we load conf.py into a Config object + cfg = Config.read(tmp_path, {}, None) + cfg.init_values() + + # Then the language is coerced to English + assert cfg.language == "en" + + +@mock.patch("sphinx.config.logger") +def test_conf_py_language_none_warning(logger, tmp_path): + """Regression test for #10474.""" + + # Given a conf.py file with language = None + (tmp_path / 'conf.py').write_text("language = None", encoding='utf-8') + + # When we load conf.py into a Config object + Config.read(tmp_path, {}, None) + + # Then a warning is raised + assert logger.warning.called + assert logger.warning.call_args[0][0] == ( + "Invalid configuration value found: 'language = None'. " + "Update your configuration to a valid language code. 
" + "Falling back to 'en' (English).") + + +def test_conf_py_no_language(tmp_path): + """Regression test for #10474.""" + + # Given a conf.py file with no language attribute + (tmp_path / 'conf.py').write_text("", encoding='utf-8') + + # When we load conf.py into a Config object + cfg = Config.read(tmp_path, {}, None) + cfg.init_values() + + # Then the language is coerced to English + assert cfg.language == "en" + + +def test_conf_py_nitpick_ignore_list(tmp_path): + """Regression test for #11355.""" + + # Given a conf.py file with no language attribute + (tmp_path / 'conf.py').write_text("", encoding='utf-8') + + # When we load conf.py into a Config object + cfg = Config.read(tmp_path, {}, None) + cfg.init_values() + + # Then the default nitpick_ignore[_regex] is an empty list + assert cfg.nitpick_ignore == [] + assert cfg.nitpick_ignore_regex == [] + + +@pytest.fixture(params=[ + # test with SOURCE_DATE_EPOCH unset: no modification + None, + # test with SOURCE_DATE_EPOCH set: copyright year should be updated + 1293840000, + 1293839999, +]) +def source_date_year(request, monkeypatch): + sde = request.param + with monkeypatch.context() as m: + if sde: + m.setenv('SOURCE_DATE_EPOCH', str(sde)) + yield time.gmtime(sde).tm_year + else: + m.delenv('SOURCE_DATE_EPOCH', raising=False) + yield None + + +@pytest.mark.sphinx(testroot='copyright-multiline') +def test_multi_line_copyright(source_date_year, app, monkeypatch): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf-8') + + if source_date_year is None: + # check the copyright footer line by line (empty lines ignored) + assert ' © Copyright 2006.<br/>\n' in content + assert ' © Copyright 2006-2009, Alice.<br/>\n' in content + assert ' © Copyright 2010-2013, Bob.<br/>\n' in content + assert ' © Copyright 2014-2017, Charlie.<br/>\n' in content + assert ' © Copyright 2018-2021, David.<br/>\n' in content + assert ' © Copyright 2022-2025, Eve.' in content + + # check the raw copyright footer block (empty lines included) + assert ( + ' © Copyright 2006.<br/>\n' + ' \n' + ' © Copyright 2006-2009, Alice.<br/>\n' + ' \n' + ' © Copyright 2010-2013, Bob.<br/>\n' + ' \n' + ' © Copyright 2014-2017, Charlie.<br/>\n' + ' \n' + ' © Copyright 2018-2021, David.<br/>\n' + ' \n' + ' © Copyright 2022-2025, Eve.' + ) in content + else: + # check the copyright footer line by line (empty lines ignored) + assert f' © Copyright {source_date_year}.<br/>\n' in content + assert f' © Copyright 2006-{source_date_year}, Alice.<br/>\n' in content + assert f' © Copyright 2010-{source_date_year}, Bob.<br/>\n' in content + assert f' © Copyright 2014-{source_date_year}, Charlie.<br/>\n' in content + assert f' © Copyright 2018-{source_date_year}, David.<br/>\n' in content + assert f' © Copyright 2022-{source_date_year}, Eve.' in content + + # check the raw copyright footer block (empty lines included) + assert ( + f' © Copyright {source_date_year}.<br/>\n' + f' \n' + f' © Copyright 2006-{source_date_year}, Alice.<br/>\n' + f' \n' + f' © Copyright 2010-{source_date_year}, Bob.<br/>\n' + f' \n' + f' © Copyright 2014-{source_date_year}, Charlie.<br/>\n' + f' \n' + f' © Copyright 2018-{source_date_year}, David.<br/>\n' + f' \n' + f' © Copyright 2022-{source_date_year}, Eve.' 
+ ) in content diff --git a/tests/test_correct_year.py b/tests/test_correct_year.py new file mode 100644 index 0000000..4ef77a6 --- /dev/null +++ b/tests/test_correct_year.py @@ -0,0 +1,29 @@ +"""Test copyright year adjustment""" +import pytest + + +@pytest.fixture( + params=[ + # test with SOURCE_DATE_EPOCH unset: no modification + (None, '2006-2009'), + # test with SOURCE_DATE_EPOCH set: copyright year should be updated + ('1293840000', '2006-2011'), + ('1293839999', '2006-2010'), + ], + +) +def expect_date(request, monkeypatch): + sde, expect = request.param + with monkeypatch.context() as m: + if sde: + m.setenv('SOURCE_DATE_EPOCH', sde) + else: + m.delenv('SOURCE_DATE_EPOCH', raising=False) + yield expect + + +@pytest.mark.sphinx('html', testroot='correct-year') +def test_correct_year(expect_date, app): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert expect_date in content diff --git a/tests/test_directive_code.py b/tests/test_directive_code.py new file mode 100644 index 0000000..df7de57 --- /dev/null +++ b/tests/test_directive_code.py @@ -0,0 +1,595 @@ +"""Test the code-block directive.""" + +import os.path + +import pytest +from docutils import nodes + +from sphinx.config import Config +from sphinx.directives.code import LiteralIncludeReader +from sphinx.testing.util import etree_parse + +DUMMY_CONFIG = Config({}, {}) + + +@pytest.fixture(scope='module') +def testroot(rootdir): + testroot_path = rootdir / 'test-directive-code' + return testroot_path + + +@pytest.fixture(scope='module') +def literal_inc_path(testroot): + return testroot / 'literal.inc' + + +def test_LiteralIncludeReader(literal_inc_path): + options = {'lineno-match': True} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == literal_inc_path.read_text(encoding='utf8') + assert lines == 13 + assert reader.lineno_start == 1 + + +def test_LiteralIncludeReader_lineno_start(literal_inc_path): + options = {'lineno-start': 4} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == literal_inc_path.read_text(encoding='utf8') + assert lines == 13 + assert reader.lineno_start == 4 + + +def test_LiteralIncludeReader_pyobject1(literal_inc_path): + options = {'lineno-match': True, 'pyobject': 'Foo'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("class Foo:\n" + " pass\n") + assert reader.lineno_start == 5 + + +def test_LiteralIncludeReader_pyobject2(literal_inc_path): + options = {'pyobject': 'Bar'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("class Bar:\n" + " def baz():\n" + " pass\n") + assert reader.lineno_start == 1 # no lineno-match + + +def test_LiteralIncludeReader_pyobject3(literal_inc_path): + options = {'pyobject': 'Bar.baz'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == (" def baz():\n" + " pass\n") + + +def test_LiteralIncludeReader_pyobject_and_lines(literal_inc_path): + options = {'pyobject': 'Bar', 'lines': '2-'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == (" def baz():\n" + " pass\n") + + +def test_LiteralIncludeReader_lines1(literal_inc_path): + options = {'lines': '1-3'} + reader = 
LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("# Literally included file using Python highlighting\n" + "\n" + "foo = \"Including Unicode characters: üöä\"\n") + + +def test_LiteralIncludeReader_lines2(literal_inc_path): + options = {'lines': '1,3,5'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("# Literally included file using Python highlighting\n" + "foo = \"Including Unicode characters: üöä\"\n" + "class Foo:\n") + + +def test_LiteralIncludeReader_lines_and_lineno_match1(literal_inc_path): + options = {'lines': '3-5', 'lineno-match': True} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("foo = \"Including Unicode characters: üöä\"\n" + "\n" + "class Foo:\n") + assert reader.lineno_start == 3 + + +@pytest.mark.sphinx() # init locale for errors +def test_LiteralIncludeReader_lines_and_lineno_match2(literal_inc_path, app, status, warning): + options = {'lines': '0,3,5', 'lineno-match': True} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + with pytest.raises(ValueError, match='Cannot use "lineno-match" with a disjoint set of "lines"'): + reader.read() + + +@pytest.mark.sphinx() # init locale for errors +def test_LiteralIncludeReader_lines_and_lineno_match3(literal_inc_path, app, status, warning): + options = {'lines': '100-', 'lineno-match': True} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + with pytest.raises(ValueError, match="Line spec '100-': no lines pulled from include file"): + reader.read() + + +def test_LiteralIncludeReader_start_at(literal_inc_path): + options = {'lineno-match': True, 'start-at': 'Foo', 'end-at': 'Bar'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("class Foo:\n" + " pass\n" + "\n" + "class Bar:\n") + assert reader.lineno_start == 5 + + +def test_LiteralIncludeReader_start_after(literal_inc_path): + options = {'lineno-match': True, 'start-after': 'Foo', 'end-before': 'Bar'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == (" pass\n" + "\n") + assert reader.lineno_start == 6 + + +def test_LiteralIncludeReader_start_after_and_lines(literal_inc_path): + options = {'lineno-match': True, 'lines': '6-', + 'start-after': 'Literally', 'end-before': 'comment'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("\n" + "class Bar:\n" + " def baz():\n" + " pass\n" + "\n") + assert reader.lineno_start == 7 + + +def test_LiteralIncludeReader_start_at_and_lines(literal_inc_path): + options = {'lines': '2, 3, 5', 'start-at': 'foo', 'end-before': '#'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("\n" + "class Foo:\n" + "\n") + assert reader.lineno_start == 1 + + +def test_LiteralIncludeReader_missing_start_and_end(literal_inc_path): + options = {'start-at': 'NOTHING'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + with pytest.raises(ValueError, match='start-at pattern not found: NOTHING'): + reader.read() + + options = {'end-at': 'NOTHING'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + with pytest.raises(ValueError, match='end-at 
pattern not found: NOTHING'): + reader.read() + + options = {'start-after': 'NOTHING'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + with pytest.raises(ValueError, match='start-after pattern not found: NOTHING'): + reader.read() + + options = {'end-before': 'NOTHING'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + with pytest.raises(ValueError, match='end-before pattern not found: NOTHING'): + reader.read() + + +def test_LiteralIncludeReader_end_before(literal_inc_path): + options = {'end-before': 'nclud'} # *nclud* matches first and third lines. + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("# Literally included file using Python highlighting\n" + "\n") + + +def test_LiteralIncludeReader_prepend(literal_inc_path): + options = {'lines': '1', 'prepend': 'Hello', 'append': 'Sphinx'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("Hello\n" + "# Literally included file using Python highlighting\n" + "Sphinx\n") + + +def test_LiteralIncludeReader_dedent(literal_inc_path): + # dedent: 2 + options = {'lines': '9-11', 'dedent': 2} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == (" def baz():\n" + " pass\n" + "\n") + + # dedent: 4 + options = {'lines': '9-11', 'dedent': 4} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("def baz():\n" + " pass\n" + "\n") + + # dedent: 6 + options = {'lines': '9-11', 'dedent': 6} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("f baz():\n" + " pass\n" + "\n") + + # dedent: None + options = {'lines': '9-11', 'dedent': None} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("def baz():\n" + " pass\n" + "\n") + + +def test_LiteralIncludeReader_dedent_and_append_and_prepend(literal_inc_path): + # dedent: 2 + options = {'lines': '9-11', 'dedent': 2, 'prepend': 'class Foo:', 'append': '# comment'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("class Foo:\n" + " def baz():\n" + " pass\n" + "\n" + "# comment\n") + + +def test_LiteralIncludeReader_tabwidth(testroot): + # tab-width: 4 + options = {'tab-width': 4, 'pyobject': 'Qux'} + reader = LiteralIncludeReader(testroot / 'target.py', options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("class Qux:\n" + " def quux(self):\n" + " pass\n") + + # tab-width: 8 + options = {'tab-width': 8, 'pyobject': 'Qux'} + reader = LiteralIncludeReader(testroot / 'target.py', options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("class Qux:\n" + " def quux(self):\n" + " pass\n") + + +def test_LiteralIncludeReader_tabwidth_dedent(testroot): + options = {'tab-width': 4, 'dedent': 4, 'pyobject': 'Qux.quux'} + reader = LiteralIncludeReader(testroot / 'target.py', options, DUMMY_CONFIG) + content, lines = reader.read() + assert content == ("def quux(self):\n" + " pass\n") + + +def test_LiteralIncludeReader_diff(testroot, literal_inc_path): + options = {'diff': testroot / 'literal-diff.inc'} + reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) + content, lines = reader.read() + 
assert content == ("--- " + os.path.join(testroot, 'literal-diff.inc') + "\n" + "+++ " + os.path.join(testroot, 'literal.inc') + "\n" + "@@ -6,8 +6,8 @@\n" + " pass\n" + " \n" + " class Bar:\n" + "- def baz(self):\n" + "+ def baz():\n" + " pass\n" + " \n" + "-# comment after Bar class\n" + "+# comment after Bar class definition\n" + " def bar(): pass\n") + + +@pytest.mark.sphinx('xml', testroot='directive-code') +def test_code_block(app, status, warning): + app.builder.build('index') + et = etree_parse(app.outdir / 'index.xml') + secs = et.findall('./section/section') + code_block = secs[0].findall('literal_block') + assert len(code_block) > 0 + actual = code_block[0].text + expect = ( + " def ruby?\n" + + " false\n" + + " end" + ) + assert actual == expect + + +@pytest.mark.sphinx('html', testroot='directive-code') +def test_force_option(app, status, warning): + app.builder.build(['force']) + assert 'force.rst' not in warning.getvalue() + + +@pytest.mark.sphinx('html', testroot='directive-code') +def test_code_block_caption_html(app, status, warning): + app.builder.build(['caption']) + html = (app.outdir / 'caption.html').read_text(encoding='utf8') + caption = ('<div class="code-block-caption">' + '<span class="caption-number">Listing 1 </span>' + '<span class="caption-text">caption <em>test</em> rb' + '</span><a class="headerlink" href="#id1" ' + 'title="Link to this code">\xb6</a></div>') + assert caption in html + + +@pytest.mark.sphinx('latex', testroot='directive-code') +def test_code_block_caption_latex(app, status, warning): + app.builder.build_all() + latex = (app.outdir / 'python.tex').read_text(encoding='utf8') + caption = '\\sphinxSetupCaptionForVerbatim{caption \\sphinxstyleemphasis{test} rb}' + label = '\\def\\sphinxLiteralBlockLabel{\\label{\\detokenize{caption:id1}}}' + link = '\\hyperref[\\detokenize{caption:name-test-rb}]' \ + '{Listing \\ref{\\detokenize{caption:name-test-rb}}}' + assert caption in latex + assert label in latex + assert link in latex + + +@pytest.mark.sphinx('latex', testroot='directive-code') +def test_code_block_namedlink_latex(app, status, warning): + app.builder.build_all() + latex = (app.outdir / 'python.tex').read_text(encoding='utf8') + label1 = '\\def\\sphinxLiteralBlockLabel{\\label{\\detokenize{caption:name-test-rb}}}' + link1 = '\\hyperref[\\detokenize{caption:name-test-rb}]'\ + '{\\sphinxcrossref{\\DUrole{std,std-ref}{Ruby}}' + label2 = ('\\def\\sphinxLiteralBlockLabel' + '{\\label{\\detokenize{namedblocks:some-ruby-code}}}') + link2 = '\\hyperref[\\detokenize{namedblocks:some-ruby-code}]'\ + '{\\sphinxcrossref{\\DUrole{std,std-ref}{the ruby code}}}' + assert label1 in latex + assert link1 in latex + assert label2 in latex + assert link2 in latex + + +@pytest.mark.sphinx('latex', testroot='directive-code') +def test_code_block_emphasize_latex(app, status, warning): + app.builder.build(['emphasize']) + latex = (app.outdir / 'python.tex').read_text(encoding='utf8').replace('\r\n', '\n') + includes = '\\fvset{hllines={, 5, 6, 13, 14, 15, 24, 25, 26,}}%\n' + assert includes in latex + includes = '\\end{sphinxVerbatim}\n\\sphinxresetverbatimhllines\n' + assert includes in latex + + +@pytest.mark.sphinx('xml', testroot='directive-code') +def test_literal_include(app, status, warning): + app.builder.build(['index']) + et = etree_parse(app.outdir / 'index.xml') + secs = et.findall('./section/section') + literal_include = secs[1].findall('literal_block') + literal_src = (app.srcdir / 'literal.inc').read_text(encoding='utf8') + assert 
len(literal_include) > 0 + actual = literal_include[0].text + assert actual == literal_src + + +@pytest.mark.sphinx('xml', testroot='directive-code') +def test_literal_include_block_start_with_comment_or_brank(app, status, warning): + app.builder.build(['python']) + et = etree_parse(app.outdir / 'python.xml') + secs = et.findall('./section/section') + literal_include = secs[0].findall('literal_block') + assert len(literal_include) > 0 + actual = literal_include[0].text + expect = ( + 'def block_start_with_comment():\n' + ' # Comment\n' + ' return 1\n' + ) + assert actual == expect + + actual = literal_include[1].text + expect = ( + 'def block_start_with_blank():\n' + '\n' + ' return 1\n' + ) + assert actual == expect + + +@pytest.mark.sphinx('html', testroot='directive-code') +def test_literal_include_linenos(app, status, warning): + app.builder.build(['linenos']) + html = (app.outdir / 'linenos.html').read_text(encoding='utf8') + + # :linenos: + assert ('<span class="linenos"> 1</span><span class="c1">' + '# Literally included file using Python highlighting</span>' in html) + + # :lineno-start: + assert ('<span class="linenos">200</span><span class="c1">' + '# Literally included file using Python highlighting</span>' in html) + + # :lines: 5-9 + assert ('<span class="linenos">5</span><span class="k">class</span> ' + '<span class="nc">Foo</span><span class="p">:</span>' in html) + + +@pytest.mark.sphinx('latex', testroot='directive-code') +def test_literalinclude_file_whole_of_emptyline(app, status, warning): + app.builder.build_all() + latex = (app.outdir / 'python.tex').read_text(encoding='utf8').replace('\r\n', '\n') + includes = ( + '\\begin{sphinxVerbatim}' + '[commandchars=\\\\\\{\\},numbers=left,firstnumber=1,stepnumber=1]\n' + '\n' + '\n' + '\n' + '\\end{sphinxVerbatim}\n') + assert includes in latex + + +@pytest.mark.sphinx('html', testroot='directive-code') +def test_literalinclude_caption_html(app, status, warning): + app.builder.build_all() + html = (app.outdir / 'caption.html').read_text(encoding='utf8') + caption = ('<div class="code-block-caption">' + '<span class="caption-number">Listing 2 </span>' + '<span class="caption-text">caption <strong>test</strong> py' + '</span><a class="headerlink" href="#id2" ' + 'title="Link to this code">\xb6</a></div>') + assert caption in html + + +@pytest.mark.sphinx('latex', testroot='directive-code') +def test_literalinclude_caption_latex(app, status, warning): + app.builder.build('index') + latex = (app.outdir / 'python.tex').read_text(encoding='utf8') + caption = '\\sphinxSetupCaptionForVerbatim{caption \\sphinxstylestrong{test} py}' + label = '\\def\\sphinxLiteralBlockLabel{\\label{\\detokenize{caption:id2}}}' + link = '\\hyperref[\\detokenize{caption:name-test-py}]' \ + '{Listing \\ref{\\detokenize{caption:name-test-py}}}' + assert caption in latex + assert label in latex + assert link in latex + + +@pytest.mark.sphinx('latex', testroot='directive-code') +def test_literalinclude_namedlink_latex(app, status, warning): + app.builder.build('index') + latex = (app.outdir / 'python.tex').read_text(encoding='utf8') + label1 = '\\def\\sphinxLiteralBlockLabel{\\label{\\detokenize{caption:name-test-py}}}' + link1 = '\\hyperref[\\detokenize{caption:name-test-py}]'\ + '{\\sphinxcrossref{\\DUrole{std,std-ref}{Python}}' + label2 = ('\\def\\sphinxLiteralBlockLabel' + '{\\label{\\detokenize{namedblocks:some-python-code}}}') + link2 = '\\hyperref[\\detokenize{namedblocks:some-python-code}]'\ + '{\\sphinxcrossref{\\DUrole{std,std-ref}{the python 
code}}}' + assert label1 in latex + assert link1 in latex + assert label2 in latex + assert link2 in latex + + +@pytest.mark.sphinx('xml', testroot='directive-code') +def test_literalinclude_classes(app, status, warning): + app.builder.build(['classes']) + et = etree_parse(app.outdir / 'classes.xml') + secs = et.findall('./section/section') + + code_block = secs[0].findall('literal_block') + assert len(code_block) > 0 + assert code_block[0].get('classes') == 'foo bar' + assert code_block[0].get('names') == 'code_block' + + literalinclude = secs[1].findall('literal_block') + assert len(literalinclude) > 0 + assert literalinclude[0].get('classes') == 'bar baz' + assert literalinclude[0].get('names') == 'literal_include' + + +@pytest.mark.sphinx('xml', testroot='directive-code') +def test_literalinclude_pydecorators(app, status, warning): + app.builder.build(['py-decorators']) + et = etree_parse(app.outdir / 'py-decorators.xml') + secs = et.findall('./section/section') + + literal_include = secs[0].findall('literal_block') + assert len(literal_include) == 3 + + actual = literal_include[0].text + expect = ( + '@class_decorator\n' + '@other_decorator()\n' + 'class TheClass(object):\n' + '\n' + ' @method_decorator\n' + ' @other_decorator()\n' + ' def the_method():\n' + ' pass\n' + ) + assert actual == expect + + actual = literal_include[1].text + expect = ( + ' @method_decorator\n' + ' @other_decorator()\n' + ' def the_method():\n' + ' pass\n' + ) + assert actual == expect + + actual = literal_include[2].text + expect = ( + '@function_decorator\n' + '@other_decorator()\n' + 'def the_function():\n' + ' pass\n' + ) + assert actual == expect + + +@pytest.mark.sphinx('dummy', testroot='directive-code') +def test_code_block_highlighted(app, status, warning): + app.builder.build(['highlight']) + doctree = app.env.get_doctree('highlight') + codeblocks = list(doctree.findall(nodes.literal_block)) + + assert codeblocks[0]['language'] == 'default' + assert codeblocks[1]['language'] == 'python2' + assert codeblocks[2]['language'] == 'python3' + assert codeblocks[3]['language'] == 'python2' + + +@pytest.mark.sphinx('html', testroot='directive-code') +def test_linenothreshold(app, status, warning): + app.builder.build(['linenothreshold']) + html = (app.outdir / 'linenothreshold.html').read_text(encoding='utf8') + + # code-block using linenothreshold + assert ('<span class="linenos">1</span><span class="k">class</span> ' + '<span class="nc">Foo</span><span class="p">:</span>' in html) + + # code-block not using linenothreshold (no line numbers) + assert '<span></span><span class="c1"># comment</span>' in html + + # literal include using linenothreshold + assert ('<span class="linenos"> 1</span><span class="c1">' + '# Literally included file using Python highlighting</span>' in html) + + # literal include not using linenothreshold (no line numbers) + assert ('<span></span><span class="c1"># Very small literal include ' + '(linenothreshold check)</span>' in html) + + +@pytest.mark.sphinx('dummy', testroot='directive-code') +def test_code_block_dedent(app, status, warning): + app.builder.build(['dedent']) + doctree = app.env.get_doctree('dedent') + codeblocks = list(doctree.findall(nodes.literal_block)) + # Note: comparison string should not have newlines at the beginning or end + text_0_indent = '''First line +Second line + Third line +Fourth line''' + text_2_indent = ''' First line + Second line + Third line + Fourth line''' + text_4_indent = ''' First line + Second line + Third line + Fourth line''' + + 
assert codeblocks[0].astext() == text_0_indent + assert codeblocks[1].astext() == text_0_indent + assert codeblocks[2].astext() == text_4_indent + assert codeblocks[3].astext() == text_2_indent + assert codeblocks[4].astext() == text_4_indent + assert codeblocks[5].astext() == text_0_indent diff --git a/tests/test_directive_object_description.py b/tests/test_directive_object_description.py new file mode 100644 index 0000000..f2c9f9d --- /dev/null +++ b/tests/test_directive_object_description.py @@ -0,0 +1,59 @@ +"""Test object description directives.""" + +import docutils.utils +import pytest +from docutils import nodes + +from sphinx import addnodes +from sphinx.io import create_publisher +from sphinx.testing import restructuredtext +from sphinx.util.docutils import sphinx_domains + + +def _doctree_for_test(builder, docname: str) -> nodes.document: + builder.env.prepare_settings(docname) + publisher = create_publisher(builder.app, 'restructuredtext') + with sphinx_domains(builder.env): + publisher.set_source(source_path=builder.env.doc2path(docname)) + publisher.publish() + return publisher.document + + +@pytest.mark.sphinx('text', testroot='object-description-sections') +def test_object_description_sections(app): + doctree = _doctree_for_test(app.builder, 'index') + # <document> + # <index> + # <desc> + # <desc_signature> + # <desc_name> + # func + # <desc_parameterlist> + # <desc_content> + # <section> + # <title> + # Overview + # <paragraph> + # Lorem ipsum dolar sit amet + + assert isinstance(doctree[0], addnodes.index) + assert isinstance(doctree[1], addnodes.desc) + assert isinstance(doctree[1][0], addnodes.desc_signature) + assert isinstance(doctree[1][1], addnodes.desc_content) + assert isinstance(doctree[1][1][0], nodes.section) + assert isinstance(doctree[1][1][0][0], nodes.title) + assert doctree[1][1][0][0][0] == 'Overview' + assert isinstance(doctree[1][1][0][1], nodes.paragraph) + assert doctree[1][1][0][1][0] == 'Lorem ipsum dolar sit amet' + + +def test_object_description_content_line_number(app): + text = (".. 
py:function:: foo(bar)\n" + + "\n" + + " Some link here: :ref:`abc`\n") + doc = restructuredtext.parse(app, text) + xrefs = list(doc.findall(condition=addnodes.pending_xref)) + assert len(xrefs) == 1 + source, line = docutils.utils.get_source_line(xrefs[0]) + assert 'index.rst' in source + assert line == 3 diff --git a/tests/test_directive_only.py b/tests/test_directive_only.py new file mode 100644 index 0000000..2e9ea63 --- /dev/null +++ b/tests/test_directive_only.py @@ -0,0 +1,46 @@ +"""Test the only directive with the test root.""" + +import re + +import pytest +from docutils import nodes + + +@pytest.mark.sphinx('text', testroot='directive-only') +def test_sectioning(app, status, warning): + + def getsects(section): + if not isinstance(section, nodes.section): + return [getsects(n) for n in section.children] + title = section.next_node(nodes.title).astext().strip() + subsects = [] + children = section.children[:] + while children: + node = children.pop(0) + if isinstance(node, nodes.section): + subsects.append(node) + continue + children = list(node.children) + children + return [title, [getsects(subsect) for subsect in subsects]] + + def testsects(prefix, sects, indent=0): + title = sects[0] + parent_num = title.split()[0] + assert prefix == parent_num, \ + 'Section out of place: %r' % title + for i, subsect in enumerate(sects[1]): + num = subsect[0].split()[0] + assert re.match('[0-9]+[.0-9]*[.]', num), \ + 'Unnumbered section: %r' % subsect[0] + testsects(prefix + str(i + 1) + '.', subsect, indent + 4) + + app.builder.build(['only']) + doctree = app.env.get_doctree('only') + app.env.apply_post_transforms(doctree, 'only') + + parts = [getsects(n) + for n in [_n for _n in doctree.children if isinstance(_n, nodes.section)]] + for i, s in enumerate(parts): + testsects(str(i + 1) + '.', s, 4) + assert len(parts) == 4, 'Expected 4 document level headings, got:\n%s' % \ + '\n'.join([p[0] for p in parts]) diff --git a/tests/test_directive_other.py b/tests/test_directive_other.py new file mode 100644 index 0000000..1feb251 --- /dev/null +++ b/tests/test_directive_other.py @@ -0,0 +1,195 @@ +"""Test the other directives.""" +from pathlib import Path + +import pytest +from docutils import nodes + +from sphinx import addnodes +from sphinx.testing import restructuredtext +from sphinx.testing.util import assert_node + + +@pytest.mark.sphinx(testroot='toctree-glob') +def test_toctree(app): + text = (".. toctree::\n" + "\n" + " foo\n" + " bar/index\n" + " baz\n") + + app.env.find_files(app.config, app.builder) + doctree = restructuredtext.parse(app, text, 'index') + assert_node(doctree, [nodes.document, nodes.compound, addnodes.toctree]) + assert_node(doctree[0][0], + entries=[(None, 'foo'), (None, 'bar/index'), (None, 'baz')], + includefiles=['foo', 'bar/index', 'baz']) + + +@pytest.mark.sphinx(testroot='toctree-glob') +def test_relative_toctree(app): + text = (".. toctree::\n" + "\n" + " bar_1\n" + " bar_2\n" + " bar_3\n" + " ../quux\n") + + app.env.find_files(app.config, app.builder) + doctree = restructuredtext.parse(app, text, 'bar/index') + assert_node(doctree, [nodes.document, nodes.compound, addnodes.toctree]) + assert_node(doctree[0][0], + entries=[(None, 'bar/bar_1'), (None, 'bar/bar_2'), (None, 'bar/bar_3'), + (None, 'quux')], + includefiles=['bar/bar_1', 'bar/bar_2', 'bar/bar_3', 'quux']) + + +@pytest.mark.sphinx(testroot='toctree-glob') +def test_toctree_urls_and_titles(app): + text = (".. 
toctree::\n" + "\n" + " Sphinx <https://www.sphinx-doc.org/>\n" + " https://readthedocs.org/\n" + " The BAR <bar/index>\n") + + app.env.find_files(app.config, app.builder) + doctree = restructuredtext.parse(app, text, 'index') + assert_node(doctree, [nodes.document, nodes.compound, addnodes.toctree]) + assert_node(doctree[0][0], + entries=[('Sphinx', 'https://www.sphinx-doc.org/'), + (None, 'https://readthedocs.org/'), + ('The BAR', 'bar/index')], + includefiles=['bar/index']) + + +@pytest.mark.sphinx(testroot='toctree-glob') +def test_toctree_glob(app): + text = (".. toctree::\n" + " :glob:\n" + "\n" + " *\n") + + app.env.find_files(app.config, app.builder) + doctree = restructuredtext.parse(app, text, 'index') + assert_node(doctree, [nodes.document, nodes.compound, addnodes.toctree]) + assert_node(doctree[0][0], + entries=[(None, 'baz'), (None, 'foo'), (None, 'quux')], + includefiles=['baz', 'foo', 'quux']) + + # give both docname and glob (case1) + text = (".. toctree::\n" + " :glob:\n" + "\n" + " foo\n" + " *\n") + + app.env.find_files(app.config, app.builder) + doctree = restructuredtext.parse(app, text, 'index') + assert_node(doctree, [nodes.document, nodes.compound, addnodes.toctree]) + assert_node(doctree[0][0], + entries=[(None, 'foo'), (None, 'baz'), (None, 'quux')], + includefiles=['foo', 'baz', 'quux']) + + # give both docname and glob (case2) + text = (".. toctree::\n" + " :glob:\n" + "\n" + " *\n" + " foo\n") + + app.env.find_files(app.config, app.builder) + doctree = restructuredtext.parse(app, text, 'index') + assert_node(doctree, [nodes.document, nodes.compound, addnodes.toctree]) + assert_node(doctree[0][0], + entries=[(None, 'baz'), (None, 'foo'), (None, 'quux'), (None, 'foo')], + includefiles=['baz', 'foo', 'quux', 'foo']) + + +@pytest.mark.sphinx(testroot='toctree-glob') +def test_toctree_glob_and_url(app): + text = (".. toctree::\n" + " :glob:\n" + "\n" + " https://example.com/?q=sphinx\n") + + app.env.find_files(app.config, app.builder) + doctree = restructuredtext.parse(app, text, 'index') + assert_node(doctree, [nodes.document, nodes.compound, addnodes.toctree]) + assert_node(doctree[0][0], + entries=[(None, 'https://example.com/?q=sphinx')], + includefiles=[]) + + +@pytest.mark.sphinx(testroot='toctree-glob') +def test_reversed_toctree(app): + text = (".. toctree::\n" + " :reversed:\n" + "\n" + " foo\n" + " bar/index\n" + " baz\n") + + app.env.find_files(app.config, app.builder) + doctree = restructuredtext.parse(app, text, 'index') + assert_node(doctree, [nodes.document, nodes.compound, addnodes.toctree]) + assert_node(doctree[0][0], + entries=[(None, 'baz'), (None, 'bar/index'), (None, 'foo')], + includefiles=['baz', 'bar/index', 'foo']) + + +@pytest.mark.sphinx(testroot='toctree-glob') +def test_toctree_twice(app): + text = (".. toctree::\n" + "\n" + " foo\n" + " foo\n") + + app.env.find_files(app.config, app.builder) + doctree = restructuredtext.parse(app, text, 'index') + assert_node(doctree, [nodes.document, nodes.compound, addnodes.toctree]) + assert_node(doctree[0][0], + entries=[(None, 'foo'), (None, 'foo')], + includefiles=['foo', 'foo']) + + +@pytest.mark.sphinx(testroot='directive-include') +def test_include_include_read_event(app): + sources_reported = [] + + def source_read_handler(_app, relative_path, parent_docname, source): + sources_reported.append((relative_path, parent_docname, source[0])) + + app.connect("include-read", source_read_handler) + text = """\ +.. include:: baz/baz.rst + :start-line: 4 +.. include:: text.txt + :literal: +.. 
include:: bar.txt +""" + app.env.find_files(app.config, app.builder) + restructuredtext.parse(app, text, 'index') + + included_files = {filename.as_posix() + for filename, p, s in sources_reported} + assert 'index.rst' not in included_files # sources don't emit 'include-read' + assert 'baz/baz.rst' in included_files + assert 'text.txt' not in included_files # text was included as literal, no rst parsing + assert 'bar.txt' in included_files # suffix not in source-suffixes + assert (Path('baz/baz.rst'), 'index', '\nBaz was here.') in sources_reported + + +@pytest.mark.sphinx(testroot='directive-include') +def test_include_include_read_event_nested_includes(app): + + def source_read_handler(_app, _relative_path, _parent_docname, source): + text = source[0].replace("#magical", "amazing") + source[0] = text + + app.connect("include-read", source_read_handler) + text = ".. include:: baz/baz.rst\n" + app.env.find_files(app.config, app.builder) + doctree = restructuredtext.parse(app, text, 'index') + assert_node(doctree, addnodes.document) + assert len(doctree.children) == 3 + assert_node(doctree.children[1], nodes.paragraph) + assert doctree.children[1].rawsource == "The amazing foo." diff --git a/tests/test_directive_patch.py b/tests/test_directive_patch.py new file mode 100644 index 0000000..f4eb8f9 --- /dev/null +++ b/tests/test_directive_patch.py @@ -0,0 +1,110 @@ +"""Test the patched directives.""" + +import pytest +from docutils import nodes + +from sphinx.testing import restructuredtext +from sphinx.testing.util import assert_node + + +def test_code_directive(app): + # normal case + text = ('.. code::\n' + '\n' + ' print("hello world")\n') + + doctree = restructuredtext.parse(app, text) + assert_node(doctree, [nodes.document, nodes.literal_block, 'print("hello world")']) + assert_node(doctree[0], language="default", highlight_args={}) + + # with language + text = ('.. code:: python\n' + '\n' + ' print("hello world")\n') + + doctree = restructuredtext.parse(app, text) + assert_node(doctree, [nodes.document, nodes.literal_block, 'print("hello world")']) + assert_node(doctree[0], language="python", highlight_args={}) + + # :number-lines: option + text = ('.. code:: python\n' + ' :number-lines:\n' + '\n' + ' print("hello world")\n') + + doctree = restructuredtext.parse(app, text) + assert_node(doctree, [nodes.document, nodes.literal_block, 'print("hello world")']) + assert_node(doctree[0], language="python", linenos=True, highlight_args={}) + + # :number-lines: option + text = ('.. code:: python\n' + ' :number-lines: 5\n' + '\n' + ' print("hello world")\n') + + doctree = restructuredtext.parse(app, text) + assert_node(doctree, [nodes.document, nodes.literal_block, 'print("hello world")']) + assert_node(doctree[0], language="python", linenos=True, highlight_args={'linenostart': 5}) + + +@pytest.mark.sphinx(testroot='directive-csv-table') +def test_csv_table_directive(app): + # relative path from current document + text = ('.. csv-table::\n' + ' :file: example.csv\n') + doctree = restructuredtext.parse(app, text, docname="subdir/index") + assert_node(doctree, + ([nodes.table, nodes.tgroup, (nodes.colspec, + nodes.colspec, + nodes.colspec, + [nodes.tbody, nodes.row])],)) + assert_node(doctree[0][0][3][0], + ([nodes.entry, nodes.paragraph, "FOO"], + [nodes.entry, nodes.paragraph, "BAR"], + [nodes.entry, nodes.paragraph, "BAZ"])) + + # absolute path from source directory + text = ('.. 
csv-table::\n' + ' :file: /example.csv\n') + doctree = restructuredtext.parse(app, text, docname="subdir/index") + assert_node(doctree, + ([nodes.table, nodes.tgroup, (nodes.colspec, + nodes.colspec, + nodes.colspec, + [nodes.tbody, nodes.row])],)) + assert_node(doctree[0][0][3][0], + ([nodes.entry, nodes.paragraph, "foo"], + [nodes.entry, nodes.paragraph, "bar"], + [nodes.entry, nodes.paragraph, "baz"])) + + +def test_math_directive(app): + # normal case + text = '.. math:: E = mc^2' + doctree = restructuredtext.parse(app, text) + assert_node(doctree, [nodes.document, nodes.math_block, 'E = mc^2\n\n']) + + # :name: option + text = ('.. math:: E = mc^2\n' + ' :name: eq1\n') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, [nodes.document, (nodes.target, + [nodes.math_block, "E = mc^2\n\n"])]) + assert_node(doctree[1], nodes.math_block, docname='index', label="eq1", number=1) + + # :label: option + text = ('.. math:: E = mc^2\n' + ' :label: eq2\n') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, [nodes.document, (nodes.target, + [nodes.math_block, 'E = mc^2\n\n'])]) + assert_node(doctree[1], nodes.math_block, docname='index', label="eq2", number=2) + + # :label: option without value + text = ('.. math:: E = mc^2\n' + ' :label:\n') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, [nodes.document, (nodes.target, + [nodes.math_block, 'E = mc^2\n\n'])]) + assert_node(doctree[1], nodes.math_block, ids=['equation-index-0'], + docname='index', label="index:0", number=3) diff --git a/tests/test_directives_no_typesetting.py b/tests/test_directives_no_typesetting.py new file mode 100644 index 0000000..fd101fb --- /dev/null +++ b/tests/test_directives_no_typesetting.py @@ -0,0 +1,108 @@ +"""Tests the directives""" + +import pytest +from docutils import nodes + +from sphinx import addnodes +from sphinx.testing import restructuredtext +from sphinx.testing.util import assert_node + +DOMAINS = [ + # directive, no-index, no-index-entry, signature of f, signature of g, index entry of g + ('c:function', False, True, 'void f()', 'void g()', ('single', 'g (C function)', 'c.g', '', None)), + ('cpp:function', False, True, 'void f()', 'void g()', ('single', 'g (C++ function)', '_CPPv41gv', '', None)), + ('js:function', True, True, 'f()', 'g()', ('single', 'g() (built-in function)', 'g', '', None)), + ('py:function', True, True, 'f()', 'g()', ('pair', 'built-in function; g()', 'g', '', None)), + ('rst:directive', True, False, 'f', 'g', ('single', 'g (directive)', 'directive-g', '', None)), + ('cmdoption', True, False, 'f', 'g', ('pair', 'command line option; g', 'cmdoption-arg-g', '', None)), + ('envvar', True, False, 'f', 'g', ('single', 'environment variable; g', 'envvar-g', '', None)), +] + + +@pytest.mark.parametrize(('directive', 'no_index', 'no_index_entry', 'sig_f', 'sig_g', 'index_g'), DOMAINS) +def test_object_description_no_typesetting(app, directive, no_index, no_index_entry, sig_f, sig_g, index_g): + text = (f'.. {directive}:: {sig_f}\n' + f' :no-typesetting:\n') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, nodes.target)) + + +@pytest.mark.parametrize(('directive', 'no_index', 'no_index_entry', 'sig_f', 'sig_g', 'index_g'), DOMAINS) +def test_object_description_no_typesetting_twice(app, directive, no_index, no_index_entry, sig_f, sig_g, index_g): + text = (f'.. {directive}:: {sig_f}\n' + f' :no-typesetting:\n' + f'.. 
{directive}:: {sig_g}\n' + f' :no-typesetting:\n') + doctree = restructuredtext.parse(app, text) + # Note that all index nodes come before the target nodes + assert_node(doctree, (addnodes.index, addnodes.index, nodes.target, nodes.target)) + + +@pytest.mark.parametrize(('directive', 'no_index', 'no_index_entry', 'sig_f', 'sig_g', 'index_g'), DOMAINS) +def test_object_description_no_typesetting_noindex_orig(app, directive, no_index, no_index_entry, sig_f, sig_g, index_g): + if not no_index: + pytest.skip(f'{directive} does not support :no-index: option') + text = (f'.. {directive}:: {sig_f}\n' + f' :no-index:\n' + f'.. {directive}:: {sig_g}\n') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, addnodes.desc, addnodes.index, addnodes.desc)) + assert_node(doctree[2], addnodes.index, entries=[index_g]) + + +@pytest.mark.parametrize(('directive', 'no_index', 'no_index_entry', 'sig_f', 'sig_g', 'index_g'), DOMAINS) +def test_object_description_no_typesetting_noindex(app, directive, no_index, no_index_entry, sig_f, sig_g, index_g): + if not no_index: + pytest.skip(f'{directive} does not support :no-index: option') + text = (f'.. {directive}:: {sig_f}\n' + f' :no-index:\n' + f' :no-typesetting:\n' + f'.. {directive}:: {sig_g}\n' + f' :no-typesetting:\n') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, addnodes.index, nodes.target)) + assert_node(doctree[0], addnodes.index, entries=[]) + assert_node(doctree[1], addnodes.index, entries=[index_g]) + + +@pytest.mark.parametrize(('directive', 'no_index', 'no_index_entry', 'sig_f', 'sig_g', 'index_g'), DOMAINS) +def test_object_description_no_typesetting_no_index_entry(app, directive, no_index, no_index_entry, sig_f, sig_g, index_g): + if not no_index_entry: + pytest.skip(f'{directive} does not support :no-index-entry: option') + text = (f'.. {directive}:: {sig_f}\n' + f' :no-index-entry:\n' + f' :no-typesetting:\n' + f'.. {directive}:: {sig_g}\n' + f' :no-typesetting:\n') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, addnodes.index, nodes.target, nodes.target)) + assert_node(doctree[0], addnodes.index, entries=[]) + assert_node(doctree[1], addnodes.index, entries=[index_g]) + + +@pytest.mark.parametrize(('directive', 'no_index', 'no_index_entry', 'sig_f', 'sig_g', 'index_g'), DOMAINS) +def test_object_description_no_typesetting_code(app, directive, no_index, no_index_entry, sig_f, sig_g, index_g): + text = (f'.. {directive}:: {sig_f}\n' + f' :no-typesetting:\n' + f'.. {directive}:: {sig_g}\n' + f' :no-typesetting:\n' + f'.. code::\n' + f'\n' + f' code\n') + doctree = restructuredtext.parse(app, text) + # Note that all index nodes come before the targets + assert_node(doctree, (addnodes.index, addnodes.index, nodes.target, nodes.target, nodes.literal_block)) + + +@pytest.mark.parametrize(('directive', 'no_index', 'no_index_entry', 'sig_f', 'sig_g', 'index_g'), DOMAINS) +def test_object_description_no_typesetting_heading(app, directive, no_index, no_index_entry, sig_f, sig_g, index_g): + text = (f'.. {directive}:: {sig_f}\n' + f' :no-typesetting:\n' + f'.. {directive}:: {sig_g}\n' + f' :no-typesetting:\n' + f'\n' + f'Heading\n' + f'=======\n') + doctree = restructuredtext.parse(app, text) + # Note that all index nodes come before the targets and the heading is floated before those. 
+ assert_node(doctree, (nodes.title, addnodes.index, addnodes.index, nodes.target, nodes.target)) diff --git a/tests/test_docutilsconf.py b/tests/test_docutilsconf.py new file mode 100644 index 0000000..def6cb6 --- /dev/null +++ b/tests/test_docutilsconf.py @@ -0,0 +1,29 @@ +"""Test docutils.conf support for several writers.""" + +import pytest +from docutils import nodes + +from sphinx.testing.util import assert_node +from sphinx.util.docutils import patch_docutils + + +@pytest.mark.sphinx('dummy', testroot='docutilsconf', freshenv=True) +def test_html_with_default_docutilsconf(app, status, warning): + with patch_docutils(app.confdir): + app.build() + + doctree = app.env.get_doctree('index') + assert_node(doctree[0][1], [nodes.paragraph, ("Sphinx ", + [nodes.footnote_reference, "1"])]) + + +@pytest.mark.sphinx('dummy', testroot='docutilsconf', freshenv=True, + docutilsconf=('[restructuredtext parser]\n' + 'trim_footnote_reference_space: true\n')) +def test_html_with_docutilsconf(app, status, warning): + with patch_docutils(app.confdir): + app.build() + + doctree = app.env.get_doctree('index') + assert_node(doctree[0][1], [nodes.paragraph, ("Sphinx", + [nodes.footnote_reference, "1"])]) diff --git a/tests/test_domain_c.py b/tests/test_domain_c.py new file mode 100644 index 0000000..6582a0c --- /dev/null +++ b/tests/test_domain_c.py @@ -0,0 +1,1076 @@ +"""Tests the C Domain""" + +import itertools +import zlib +from xml.etree import ElementTree + +import pytest + +from sphinx import addnodes +from sphinx.addnodes import ( + desc, + desc_content, + desc_name, + desc_parameter, + desc_parameterlist, + desc_sig_name, + desc_sig_space, + desc_signature, + desc_signature_line, + pending_xref, +) +from sphinx.domains.c import ( + DefinitionError, + DefinitionParser, + Symbol, + _id_prefix, + _macroKeywords, + _max_id, +) +from sphinx.ext.intersphinx import load_mappings, normalize_intersphinx_mapping +from sphinx.testing import restructuredtext +from sphinx.testing.util import assert_node +from sphinx.writers.text import STDINDENT + + +class Config: + c_id_attributes = ["id_attr", 'LIGHTGBM_C_EXPORT'] + c_paren_attributes = ["paren_attr"] + c_extra_keywords = _macroKeywords + + +def parse(name, string): + parser = DefinitionParser(string, location=None, config=Config()) + parser.allowFallbackExpressionParsing = False + ast = parser.parse_declaration(name, name) + parser.assert_end() + return ast + + +def _check(name, input, idDict, output, key, asTextOutput): + if key is None: + key = name + key += ' ' + if name in ('function', 'member'): + inputActual = input + outputAst = output + outputAsText = output + else: + inputActual = input.format(key='') + outputAst = output.format(key='') + outputAsText = output.format(key=key) + if asTextOutput is not None: + outputAsText = asTextOutput + + # first a simple check of the AST + ast = parse(name, inputActual) + res = str(ast) + if res != outputAst: + print("") + print("Input: ", input) + print("Result: ", res) + print("Expected: ", outputAst) + raise DefinitionError + rootSymbol = Symbol(None, None, None, None, None) + symbol = rootSymbol.add_declaration(ast, docname="TestDoc", line=42) + parentNode = addnodes.desc() + signode = addnodes.desc_signature(input, '') + parentNode += signode + ast.describe_signature(signode, 'lastIsName', symbol, options={}) + resAsText = parentNode.astext() + if resAsText != outputAsText: + print("") + print("Input: ", input) + print("astext(): ", resAsText) + print("Expected: ", outputAsText) + raise DefinitionError + + 
idExpected = [None] + for i in range(1, _max_id + 1): + if i in idDict: + idExpected.append(idDict[i]) + else: + idExpected.append(idExpected[i - 1]) + idActual = [None] + for i in range(1, _max_id + 1): + # try: + id = ast.get_id(version=i) + assert id is not None + idActual.append(id[len(_id_prefix[i]):]) + # except NoOldIdError: + # idActual.append(None) + + res = [True] + for i in range(1, _max_id + 1): + res.append(idExpected[i] == idActual[i]) + + if not all(res): + print("input: %s" % input.rjust(20)) + for i in range(1, _max_id + 1): + if res[i]: + continue + print("Error in id version %d." % i) + print("result: %s" % idActual[i]) + print("expected: %s" % idExpected[i]) + # print(rootSymbol.dump(0)) + raise DefinitionError + + +def check(name, input, idDict, output=None, key=None, asTextOutput=None): + if output is None: + output = input + # First, check without semicolon + _check(name, input, idDict, output, key, asTextOutput) + if name != 'macro': + # Second, check with semicolon + _check(name, input + ' ;', idDict, output + ';', key, + asTextOutput + ';' if asTextOutput is not None else None) + + +def test_domain_c_ast_expressions(): + def exprCheck(expr, output=None): + parser = DefinitionParser(expr, location=None, config=Config()) + parser.allowFallbackExpressionParsing = False + ast = parser.parse_expression() + parser.assert_end() + # first a simple check of the AST + if output is None: + output = expr + res = str(ast) + if res != output: + print("") + print("Input: ", input) + print("Result: ", res) + print("Expected: ", output) + raise DefinitionError + displayString = ast.get_display_string() + if res != displayString: + # note: if the expression contains an anon name then this will trigger a falsely + print("") + print("Input: ", expr) + print("Result: ", res) + print("Display: ", displayString) + raise DefinitionError + + # type expressions + exprCheck('int*') + exprCheck('int *const*') + exprCheck('int *volatile*') + exprCheck('int *restrict*') + exprCheck('int *(*)(double)') + exprCheck('const int*') + exprCheck('__int64') + exprCheck('unsigned __int64') + + # actual expressions + + # primary + exprCheck('true') + exprCheck('false') + ints = ['5', '0', '075', '0x0123456789ABCDEF', '0XF', '0b1', '0B1', + "0b0'1'0", "00'1'2", "0x0'1'2", "1'2'3"] + unsignedSuffix = ['', 'u', 'U'] + longSuffix = ['', 'l', 'L', 'll', 'LL'] + for i in ints: + for u in unsignedSuffix: + for l in longSuffix: + expr = i + u + l + exprCheck(expr) + expr = i + l + u + exprCheck(expr) + for suffix in ['', 'f', 'F', 'l', 'L']: + for e in [ + '5e42', '5e+42', '5e-42', + '5.', '5.e42', '5.e+42', '5.e-42', + '.5', '.5e42', '.5e+42', '.5e-42', + '5.0', '5.0e42', '5.0e+42', '5.0e-42', + "1'2'3e7'8'9", "1'2'3.e7'8'9", + ".4'5'6e7'8'9", "1'2'3.4'5'6e7'8'9"]: + expr = e + suffix + exprCheck(expr) + for e in [ + 'ApF', 'Ap+F', 'Ap-F', + 'A.', 'A.pF', 'A.p+F', 'A.p-F', + '.A', '.ApF', '.Ap+F', '.Ap-F', + 'A.B', 'A.BpF', 'A.Bp+F', 'A.Bp-F', + "A'B'Cp1'2'3", "A'B'C.p1'2'3", + ".D'E'Fp1'2'3", "A'B'C.D'E'Fp1'2'3"]: + expr = "0x" + e + suffix + exprCheck(expr) + exprCheck('"abc\\"cba"') # string + # character literals + for p in ['', 'u8', 'u', 'U', 'L']: + exprCheck(p + "'a'") + exprCheck(p + "'\\n'") + exprCheck(p + "'\\012'") + exprCheck(p + "'\\0'") + exprCheck(p + "'\\x0a'") + exprCheck(p + "'\\x0A'") + exprCheck(p + "'\\u0a42'") + exprCheck(p + "'\\u0A42'") + exprCheck(p + "'\\U0001f34c'") + exprCheck(p + "'\\U0001F34C'") + + exprCheck('(5)') + exprCheck('C') + # postfix + exprCheck('A(2)') + 
exprCheck('A[2]') + exprCheck('a.b.c') + exprCheck('a->b->c') + exprCheck('i++') + exprCheck('i--') + # unary + exprCheck('++5') + exprCheck('--5') + exprCheck('*5') + exprCheck('&5') + exprCheck('+5') + exprCheck('-5') + exprCheck('!5') + exprCheck('not 5') + exprCheck('~5') + exprCheck('compl 5') + exprCheck('sizeof(T)') + exprCheck('sizeof -42') + exprCheck('alignof(T)') + # cast + exprCheck('(int)2') + # binary op + exprCheck('5 || 42') + exprCheck('5 or 42') + exprCheck('5 && 42') + exprCheck('5 and 42') + exprCheck('5 | 42') + exprCheck('5 bitor 42') + exprCheck('5 ^ 42') + exprCheck('5 xor 42') + exprCheck('5 & 42') + exprCheck('5 bitand 42') + # ['==', '!='] + exprCheck('5 == 42') + exprCheck('5 != 42') + exprCheck('5 not_eq 42') + # ['<=', '>=', '<', '>'] + exprCheck('5 <= 42') + exprCheck('5 >= 42') + exprCheck('5 < 42') + exprCheck('5 > 42') + # ['<<', '>>'] + exprCheck('5 << 42') + exprCheck('5 >> 42') + # ['+', '-'] + exprCheck('5 + 42') + exprCheck('5 - 42') + # ['*', '/', '%'] + exprCheck('5 * 42') + exprCheck('5 / 42') + exprCheck('5 % 42') + # ['.*', '->*'] + # conditional + # TODO + # assignment + exprCheck('a = 5') + exprCheck('a *= 5') + exprCheck('a /= 5') + exprCheck('a %= 5') + exprCheck('a += 5') + exprCheck('a -= 5') + exprCheck('a >>= 5') + exprCheck('a <<= 5') + exprCheck('a &= 5') + exprCheck('a and_eq 5') + exprCheck('a ^= 5') + exprCheck('a xor_eq 5') + exprCheck('a |= 5') + exprCheck('a or_eq 5') + + +def test_domain_c_ast_fundamental_types(): + def types(): + def signed(t): + yield t + yield 'signed ' + t + yield 'unsigned ' + t + + # integer types + # ------------- + yield 'void' + yield from ('_Bool', 'bool') + yield from signed('char') + yield from signed('short') + yield from signed('short int') + yield from signed('int') + yield from ('signed', 'unsigned') + yield from signed('long') + yield from signed('long int') + yield from signed('long long') + yield from signed('long long int') + yield from ('__int128', '__uint128') + # extensions + for t in ('__int8', '__int16', '__int32', '__int64', '__int128'): + yield from signed(t) + + # floating point types + # -------------------- + yield from ('_Decimal32', '_Decimal64', '_Decimal128') + for f in ('float', 'double', 'long double'): + yield f + yield from (f + " _Complex", f + " complex") + yield from ("_Complex " + f, "complex " + f) + yield from ("_Imaginary " + f, "imaginary " + f) + # extensions + # https://gcc.gnu.org/onlinedocs/gcc/Floating-Types.html#Floating-Types + yield from ('__float80', '_Float64x', + '__float128', '_Float128', + '__ibm128') + # https://gcc.gnu.org/onlinedocs/gcc/Half-Precision.html#Half-Precision + yield '__fp16' + + # fixed-point types (extension) + # ----------------------------- + # https://gcc.gnu.org/onlinedocs/gcc/Fixed-Point.html#Fixed-Point + for sat in ('', '_Sat '): + for t in ('_Fract', 'fract', '_Accum', 'accum'): + for size in ('short ', '', 'long ', 'long long '): + for tt in signed(size + t): + yield sat + tt + + for t in types(): + input = "{key}%s foo" % t + output = ' '.join(input.split()) + check('type', input, {1: 'foo'}, key='typedef', output=output) + if ' ' in t: + # try permutations of all components + tcs = t.split() + for p in itertools.permutations(tcs): + input = "{key}%s foo" % ' '.join(p) + output = ' '.join(input.split()) + check("type", input, {1: 'foo'}, key='typedef', output=output) + + +def test_domain_c_ast_type_definitions(): + check('type', "{key}T", {1: "T"}) + + check('type', "{key}bool *b", {1: 'b'}, key='typedef') + check('type', 
"{key}bool *const b", {1: 'b'}, key='typedef') + check('type', "{key}bool *const *b", {1: 'b'}, key='typedef') + check('type', "{key}bool *volatile *b", {1: 'b'}, key='typedef') + check('type', "{key}bool *restrict *b", {1: 'b'}, key='typedef') + check('type', "{key}bool *volatile const b", {1: 'b'}, key='typedef') + check('type', "{key}bool *volatile const b", {1: 'b'}, key='typedef') + check('type', "{key}bool *volatile const *b", {1: 'b'}, key='typedef') + check('type', "{key}bool b[]", {1: 'b'}, key='typedef') + check('type', "{key}long long int foo", {1: 'foo'}, key='typedef') + # test decl specs on right + check('type', "{key}bool const b", {1: 'b'}, key='typedef') + + # from breathe#267 (named function parameters for function pointers + check('type', '{key}void (*gpio_callback_t)(struct device *port, uint32_t pin)', + {1: 'gpio_callback_t'}, key='typedef') + + +def test_domain_c_ast_macro_definitions(): + check('macro', 'M', {1: 'M'}) + check('macro', 'M()', {1: 'M'}) + check('macro', 'M(arg)', {1: 'M'}) + check('macro', 'M(arg1, arg2)', {1: 'M'}) + check('macro', 'M(arg1, arg2, arg3)', {1: 'M'}) + check('macro', 'M(...)', {1: 'M'}) + check('macro', 'M(arg, ...)', {1: 'M'}) + check('macro', 'M(arg1, arg2, ...)', {1: 'M'}) + check('macro', 'M(arg1, arg2, arg3, ...)', {1: 'M'}) + # GNU extension + check('macro', 'M(arg1, arg2, arg3...)', {1: 'M'}) + with pytest.raises(DefinitionError): + check('macro', 'M(arg1, arg2..., arg3)', {1: 'M'}) + + +def test_domain_c_ast_member_definitions(): + check('member', 'void a', {1: 'a'}) + check('member', '_Bool a', {1: 'a'}) + check('member', 'bool a', {1: 'a'}) + check('member', 'char a', {1: 'a'}) + check('member', 'int a', {1: 'a'}) + check('member', 'float a', {1: 'a'}) + check('member', 'double a', {1: 'a'}) + + check('member', 'unsigned long a', {1: 'a'}) + check('member', '__int64 a', {1: 'a'}) + check('member', 'unsigned __int64 a', {1: 'a'}) + + check('member', 'int .a', {1: 'a'}) + + check('member', 'int *a', {1: 'a'}) + check('member', 'int **a', {1: 'a'}) + check('member', 'const int a', {1: 'a'}) + check('member', 'volatile int a', {1: 'a'}) + check('member', 'restrict int a', {1: 'a'}) + check('member', 'volatile const int a', {1: 'a'}) + check('member', 'restrict const int a', {1: 'a'}) + check('member', 'restrict volatile int a', {1: 'a'}) + check('member', 'restrict volatile const int a', {1: 'a'}) + + check('member', 'T t', {1: 't'}) + + check('member', 'int a[]', {1: 'a'}) + + check('member', 'int (*p)[]', {1: 'p'}) + + check('member', 'int a[42]', {1: 'a'}) + check('member', 'int a = 42', {1: 'a'}) + check('member', 'T a = {}', {1: 'a'}) + check('member', 'T a = {1}', {1: 'a'}) + check('member', 'T a = {1, 2}', {1: 'a'}) + check('member', 'T a = {1, 2, 3}', {1: 'a'}) + + # test from issue #1539 + check('member', 'CK_UTF8CHAR model[16]', {1: 'model'}) + + check('member', 'auto int a', {1: 'a'}) + check('member', 'register int a', {1: 'a'}) + check('member', 'extern int a', {1: 'a'}) + check('member', 'static int a', {1: 'a'}) + + check('member', 'thread_local int a', {1: 'a'}) + check('member', '_Thread_local int a', {1: 'a'}) + check('member', 'extern thread_local int a', {1: 'a'}) + check('member', 'thread_local extern int a', {1: 'a'}, + 'extern thread_local int a') + check('member', 'static thread_local int a', {1: 'a'}) + check('member', 'thread_local static int a', {1: 'a'}, + 'static thread_local int a') + + check('member', 'int b : 3', {1: 'b'}) + + +def test_domain_c_ast_function_definitions(): + check('function', 'void 
f()', {1: 'f'}) + check('function', 'void f(int)', {1: 'f'}) + check('function', 'void f(int i)', {1: 'f'}) + check('function', 'void f(int i, int j)', {1: 'f'}) + check('function', 'void f(...)', {1: 'f'}) + check('function', 'void f(int i, ...)', {1: 'f'}) + check('function', 'void f(struct T)', {1: 'f'}) + check('function', 'void f(struct T t)', {1: 'f'}) + check('function', 'void f(union T)', {1: 'f'}) + check('function', 'void f(union T t)', {1: 'f'}) + check('function', 'void f(enum T)', {1: 'f'}) + check('function', 'void f(enum T t)', {1: 'f'}) + + # test from issue #1539 + check('function', 'void f(A x[])', {1: 'f'}) + + # test from issue #2377 + check('function', 'void (*signal(int sig, void (*func)(int)))(int)', {1: 'signal'}) + + check('function', 'extern void f()', {1: 'f'}) + check('function', 'static void f()', {1: 'f'}) + check('function', 'inline void f()', {1: 'f'}) + + # tests derived from issue #1753 (skip to keep sanity) + check('function', "void f(float *q(double))", {1: 'f'}) + check('function', "void f(float *(*q)(double))", {1: 'f'}) + check('function', "void f(float (*q)(double))", {1: 'f'}) + check('function', "int (*f(double d))(float)", {1: 'f'}) + check('function', "int (*f(bool b))[5]", {1: 'f'}) + check('function', "void f(int *const p)", {1: 'f'}) + check('function', "void f(int *volatile const p)", {1: 'f'}) + + # from breathe#223 + check('function', 'void f(struct E e)', {1: 'f'}) + check('function', 'void f(enum E e)', {1: 'f'}) + check('function', 'void f(union E e)', {1: 'f'}) + + # array declarators + check('function', 'void f(int arr[])', {1: 'f'}) + check('function', 'void f(int arr[*])', {1: 'f'}) + cvrs = ['', 'const', 'volatile', 'restrict', 'restrict volatile const'] + for cvr in cvrs: + space = ' ' if len(cvr) != 0 else '' + check('function', f'void f(int arr[{cvr}*])', {1: 'f'}) + check('function', f'void f(int arr[{cvr}])', {1: 'f'}) + check('function', f'void f(int arr[{cvr}{space}42])', {1: 'f'}) + check('function', f'void f(int arr[static{space}{cvr} 42])', {1: 'f'}) + check('function', f'void f(int arr[{cvr}{space}static 42])', {1: 'f'}, + output=f'void f(int arr[static{space}{cvr} 42])') + check('function', 'void f(int arr[const static volatile 42])', {1: 'f'}, + output='void f(int arr[static volatile const 42])') + + with pytest.raises(DefinitionError): + parse('function', 'void f(int for)') + + # from #8960 + check('function', 'void f(void (*p)(int, double), int i)', {1: 'f'}) + + +def test_domain_c_ast_nested_name(): + check('struct', '{key}.A', {1: "A"}) + check('struct', '{key}.A.B', {1: "A.B"}) + check('function', 'void f(.A a)', {1: "f"}) + check('function', 'void f(.A.B a)', {1: "f"}) + + +def test_domain_c_ast_struct_definitions(): + check('struct', '{key}A', {1: 'A'}) + + +def test_domain_c_ast_union_definitions(): + check('union', '{key}A', {1: 'A'}) + + +def test_domain_c_ast_enum_definitions(): + check('enum', '{key}A', {1: 'A'}) + + check('enumerator', '{key}A', {1: 'A'}) + check('enumerator', '{key}A = 42', {1: 'A'}) + + +def test_domain_c_ast_anon_definitions(): + check('struct', '@a', {1: "@a"}, asTextOutput='struct [anonymous]') + check('union', '@a', {1: "@a"}, asTextOutput='union [anonymous]') + check('enum', '@a', {1: "@a"}, asTextOutput='enum [anonymous]') + check('struct', '@1', {1: "@1"}, asTextOutput='struct [anonymous]') + check('struct', '@a.A', {1: "@a.A"}, asTextOutput='struct [anonymous].A') + + +def test_domain_c_ast_initializers(): + idsMember = {1: 'v'} + idsFunction = {1: 'f'} + # no init + 
check('member', 'T v', idsMember) + check('function', 'void f(T v)', idsFunction) + # with '=', assignment-expression + check('member', 'T v = 42', idsMember) + check('function', 'void f(T v = 42)', idsFunction) + # with '=', braced-init + check('member', 'T v = {}', idsMember) + check('function', 'void f(T v = {})', idsFunction) + check('member', 'T v = {42, 42, 42}', idsMember) + check('function', 'void f(T v = {42, 42, 42})', idsFunction) + check('member', 'T v = {42, 42, 42,}', idsMember) + check('function', 'void f(T v = {42, 42, 42,})', idsFunction) + # TODO: designator-list + + +def test_domain_c_ast_attributes(): + # style: C++ + check('member', '[[]] int f', {1: 'f'}) + check('member', '[ [ ] ] int f', {1: 'f'}, + # this will fail when the proper grammar is implemented + output='[[ ]] int f') + check('member', '[[a]] int f', {1: 'f'}) + # style: GNU + check('member', '__attribute__(()) int f', {1: 'f'}) + check('member', '__attribute__((a)) int f', {1: 'f'}) + check('member', '__attribute__((a, b)) int f', {1: 'f'}) + check('member', '__attribute__((optimize(3))) int f', {1: 'f'}) + check('member', '__attribute__((format(printf, 1, 2))) int f', {1: 'f'}) + # style: user-defined id + check('member', 'id_attr int f', {1: 'f'}) + # style: user-defined paren + check('member', 'paren_attr() int f', {1: 'f'}) + check('member', 'paren_attr(a) int f', {1: 'f'}) + check('member', 'paren_attr("") int f', {1: 'f'}) + check('member', 'paren_attr(()[{}][]{}) int f', {1: 'f'}) + with pytest.raises(DefinitionError): + parse('member', 'paren_attr(() int f') + with pytest.raises(DefinitionError): + parse('member', 'paren_attr([) int f') + with pytest.raises(DefinitionError): + parse('member', 'paren_attr({) int f') + with pytest.raises(DefinitionError): + parse('member', 'paren_attr([)]) int f') + with pytest.raises(DefinitionError): + parse('member', 'paren_attr((])) int f') + with pytest.raises(DefinitionError): + parse('member', 'paren_attr({]}) int f') + + # position: decl specs + check('function', 'static inline __attribute__(()) void f()', {1: 'f'}, + output='__attribute__(()) static inline void f()') + check('function', '[[attr1]] [[attr2]] void f()', {1: 'f'}) + # position: declarator + check('member', 'int *[[attr1]] [[attr2]] i', {1: 'i'}) + check('member', 'int *const [[attr1]] [[attr2]] volatile i', {1: 'i'}, + output='int *[[attr1]] [[attr2]] volatile const i') + check('member', 'int *[[attr1]] [[attr2]] *i', {1: 'i'}) + # position: parameters + check('function', 'void f() [[attr1]] [[attr2]]', {1: 'f'}) + + # position: enumerator + check('enumerator', '{key}Foo [[attr1]] [[attr2]]', {1: 'Foo'}) + check('enumerator', '{key}Foo [[attr1]] [[attr2]] = 42', {1: 'Foo'}) + + # issue michaeljones/breathe#500 + check('function', 'LIGHTGBM_C_EXPORT int LGBM_BoosterFree(int handle)', + {1: 'LGBM_BoosterFree'}) + + +def test_extra_keywords(): + with pytest.raises(DefinitionError, + match='Expected identifier in nested name'): + parse('function', 'void complex(void)') + + +# def test_print(): +# # used for getting all the ids out for checking +# for a in ids: +# print(a) +# raise DefinitionError + + +def split_warnigns(warning): + ws = warning.getvalue().split("\n") + assert len(ws) >= 1 + assert ws[-1] == "" + return ws[:-1] + + +def filter_warnings(warning, file): + lines = split_warnigns(warning) + res = [l for l in lines if "domain-c" in l and f"{file}.rst" in l and + "WARNING: document isn't included in any toctree" not in l] + print(f"Filtered warnings for file '{file}':") + for w in res: + 
print(w) + return res + + +def extract_role_links(app, filename): + t = (app.outdir / filename).read_text(encoding='utf8') + lis = [l for l in t.split('\n') if l.startswith("<li")] + entries = [] + for l in lis: + li = ElementTree.fromstring(l) # NoQA: S314 # using known data in tests + aList = list(li.iter('a')) + assert len(aList) == 1 + a = aList[0] + target = a.attrib['href'].lstrip('#') + title = a.attrib['title'] + assert len(a) == 1 + code = a[0] + assert code.tag == 'code' + text = ''.join(code.itertext()) + entries.append((target, title, text)) + return entries + + +@pytest.mark.sphinx(testroot='domain-c', confoverrides={'nitpicky': True}) +def test_domain_c_build(app, status, warning): + app.builder.build_all() + ws = filter_warnings(warning, "index") + assert len(ws) == 0 + + +@pytest.mark.sphinx(testroot='domain-c', confoverrides={'nitpicky': True}) +def test_domain_c_build_namespace(app, status, warning): + app.builder.build_all() + ws = filter_warnings(warning, "namespace") + assert len(ws) == 0 + t = (app.outdir / "namespace.html").read_text(encoding='utf8') + for id_ in ('NS.NSVar', 'NULLVar', 'ZeroVar', 'NS2.NS3.NS2NS3Var', 'PopVar'): + assert f'id="c.{id_}"' in t + + +@pytest.mark.sphinx(testroot='domain-c', confoverrides={'nitpicky': True}) +def test_domain_c_build_anon_dup_decl(app, status, warning): + app.builder.build_all() + ws = filter_warnings(warning, "anon-dup-decl") + assert len(ws) == 2 + assert "WARNING: c:identifier reference target not found: @a" in ws[0] + assert "WARNING: c:identifier reference target not found: @b" in ws[1] + + +@pytest.mark.sphinx(confoverrides={'nitpicky': True}) +def test_domain_c_build_semicolon(app, warning): + text = """ +.. c:member:: int member; +.. c:var:: int var; +.. c:function:: void f(); +.. .. c:macro:: NO_SEMICOLON; +.. c:struct:: Struct; +.. c:union:: Union; +.. c:enum:: Enum; +.. c:enumerator:: Enumerator; +.. c:type:: Type; +.. 
c:type:: int TypeDef; +""" + restructuredtext.parse(app, text) + ws = split_warnigns(warning) + assert len(ws) == 0 + + +@pytest.mark.sphinx(testroot='domain-c', confoverrides={'nitpicky': True}) +def test_domain_c_build_function_param_target(app, warning): + # the anchor for function parameters should be the function + app.builder.build_all() + ws = filter_warnings(warning, "function_param_target") + assert len(ws) == 0 + entries = extract_role_links(app, "function_param_target.html") + assert entries == [ + ('c.function_param_target.f', 'i', 'i'), + ('c.function_param_target.f', 'f.i', 'f.i'), + ] + + +@pytest.mark.sphinx(testroot='domain-c', confoverrides={'nitpicky': True}) +def test_domain_c_build_ns_lookup(app, warning): + app.builder.build_all() + ws = filter_warnings(warning, "ns_lookup") + assert len(ws) == 0 + + +@pytest.mark.sphinx(testroot='domain-c', confoverrides={'nitpicky': True}) +def test_domain_c_build_field_role(app, status, warning): + app.builder.build_all() + ws = filter_warnings(warning, "field-role") + assert len(ws) == 0 + + +def _get_obj(app, queryName): + domain = app.env.get_domain('c') + for name, _dispname, objectType, docname, anchor, _prio in domain.get_objects(): + if name == queryName: + return (docname, anchor, objectType) + return (queryName, "not", "found") + + +@pytest.mark.sphinx(testroot='domain-c-intersphinx', confoverrides={'nitpicky': True}) +def test_domain_c_build_intersphinx(tmp_path, app, status, warning): + # a splitting of test_ids_vs_tags0 into the primary directives in a remote project, + # and then the references in the test project + origSource = """\ +.. c:member:: int _member +.. c:var:: int _var +.. c:function:: void _function() +.. c:macro:: _macro +.. c:struct:: _struct +.. c:union:: _union +.. c:enum:: _enum + + .. c:enumerator:: _enumerator + +.. c:type:: _type +.. c:function:: void _functionParam(int param) +""" # noqa: F841 + inv_file = tmp_path / 'inventory' + inv_file.write_bytes(b'''\ +# Sphinx inventory version 2 +# Project: C Intersphinx Test +# Version: +# The remainder of this file is compressed using zlib. +''' + zlib.compress(b'''\ +_enum c:enum 1 index.html#c.$ - +_enum._enumerator c:enumerator 1 index.html#c.$ - +_enumerator c:enumerator 1 index.html#c._enum.$ - +_function c:function 1 index.html#c.$ - +_functionParam c:function 1 index.html#c.$ - +_functionParam.param c:functionParam 1 index.html#c._functionParam - +_macro c:macro 1 index.html#c.$ - +_member c:member 1 index.html#c.$ - +_struct c:struct 1 index.html#c.$ - +_type c:type 1 index.html#c.$ - +_union c:union 1 index.html#c.$ - +_var c:member 1 index.html#c.$ - +''')) # noqa: W291 + app.config.intersphinx_mapping = { + 'https://localhost/intersphinx/c/': str(inv_file), + } + app.config.intersphinx_cache_limit = 0 + # load the inventory and check if it's done correctly + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + + app.builder.build_all() + ws = filter_warnings(warning, "index") + assert len(ws) == 0 + + +def test_domain_c_parse_cfunction(app): + text = (".. c:function:: PyObject* " + "PyType_GenericAlloc(PyTypeObject *type, Py_ssize_t nitems)") + doctree = restructuredtext.parse(app, text) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="c", objtype="function", no_index=False) + + entry = _get_obj(app, 'PyType_GenericAlloc') + assert entry == ('index', 'c.PyType_GenericAlloc', 'function') + + +def test_domain_c_parse_cmember(app): + text = ".. 
c:member:: PyObject* PyTypeObject.tp_bases" + doctree = restructuredtext.parse(app, text) + assert_node(doctree[1], addnodes.desc, desctype="member", + domain="c", objtype="member", no_index=False) + + entry = _get_obj(app, 'PyTypeObject.tp_bases') + assert entry == ('index', 'c.PyTypeObject.tp_bases', 'member') + + +def test_domain_c_parse_cvar(app): + text = ".. c:var:: PyObject* PyClass_Type" + doctree = restructuredtext.parse(app, text) + assert_node(doctree[1], addnodes.desc, desctype="var", + domain="c", objtype="var", no_index=False) + + entry = _get_obj(app, 'PyClass_Type') + assert entry == ('index', 'c.PyClass_Type', 'member') + + +def test_domain_c_parse_no_index_entry(app): + text = (".. c:function:: void f()\n" + ".. c:function:: void g()\n" + " :no-index-entry:\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, desc, addnodes.index, desc)) + assert_node(doctree[0], addnodes.index, entries=[('single', 'f (C function)', 'c.f', '', None)]) + assert_node(doctree[2], addnodes.index, entries=[]) + + +@pytest.mark.sphinx('html', confoverrides={ + 'c_maximum_signature_line_length': len("str hello(str name)"), +}) +def test_cfunction_signature_with_c_maximum_signature_line_length_equal(app): + text = ".. c:function:: str hello(str name)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_signature_line, ( + pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, "hello"]], + desc_parameterlist, + )], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="c", objtype="function", no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, "str"]], + desc_sig_space, + [desc_sig_name, "name"], + )]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'c_maximum_signature_line_length': len("str hello(str name)"), +}) +def test_cfunction_signature_with_c_maximum_signature_line_length_force_single(app): + text = (".. c:function:: str hello(str names)\n" + " :single-line-parameter-list:") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_signature_line, ( + pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, "hello"]], + desc_parameterlist, + )], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="c", objtype="function", no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, "str"]], + desc_sig_space, + [desc_sig_name, "names"], + )]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'c_maximum_signature_line_length': len("str hello(str name)"), +}) +def test_cfunction_signature_with_c_maximum_signature_line_length_break(app): + text = ".. 
c:function:: str hello(str names)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_signature_line, ( + pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, "hello"]], + desc_parameterlist, + )], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="c", objtype="function", no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, "str"]], + desc_sig_space, + [desc_sig_name, "names"], + )]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=True) + + +@pytest.mark.sphinx('html', confoverrides={ + 'maximum_signature_line_length': len("str hello(str name)"), +}) +def test_cfunction_signature_with_maximum_signature_line_length_equal(app): + text = ".. c:function:: str hello(str name)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_signature_line, ( + pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, "hello"]], + desc_parameterlist, + )], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="c", objtype="function", no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, "str"]], + desc_sig_space, + [desc_sig_name, "name"], + )]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'maximum_signature_line_length': len("str hello(str name)"), +}) +def test_cfunction_signature_with_maximum_signature_line_length_force_single(app): + text = (".. c:function:: str hello(str names)\n" + " :single-line-parameter-list:") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_signature_line, ( + pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, "hello"]], + desc_parameterlist, + )], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="c", objtype="function", no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, "str"]], + desc_sig_space, + [desc_sig_name, "names"], + )]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'maximum_signature_line_length': len("str hello(str name)"), +}) +def test_cfunction_signature_with_maximum_signature_line_length_break(app): + text = ".. 
c:function:: str hello(str names)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_signature_line, ( + pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, "hello"]], + desc_parameterlist, + )], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="c", objtype="function", no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, "str"]], + desc_sig_space, + [desc_sig_name, "names"], + )]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=True) + + +@pytest.mark.sphinx('html', confoverrides={ + 'c_maximum_signature_line_length': len('str hello(str name)'), + 'maximum_signature_line_length': 1, +}) +def test_c_maximum_signature_line_length_overrides_global(app): + text = '.. c:function:: str hello(str name)' + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_signature_line, ( + pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, 'hello']], + desc_parameterlist, + )] + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype='function', + domain='c', objtype='function', no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, "str"]], + desc_sig_space, + [desc_sig_name, 'name'], + )]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', testroot='domain-c-c_maximum_signature_line_length') +def test_domain_c_c_maximum_signature_line_length_in_html(app, status, warning): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf-8') + expected = """\ + +<dl> +<dd>\ +<span class="n"><span class="pre">str</span></span>\ +<span class="w"> </span>\ +<span class="n"><span class="pre">name</span></span>,\ +</dd> +</dl> + +<span class="sig-paren">)</span>\ +<a class="headerlink" href="#c.hello" title="Link to this definition">¶</a>\ +<br />\ +</dt> +""" + assert expected in content + + +@pytest.mark.sphinx( + 'text', testroot='domain-c-c_maximum_signature_line_length', +) +def test_domain_c_c_maximum_signature_line_length_in_text(app, status, warning): + app.build() + content = (app.outdir / 'index.txt').read_text(encoding='utf8') + param_line_fmt = STDINDENT * " " + "{}\n" + + expected_parameter_list_hello = "(\n{})".format(param_line_fmt.format("str name,")) + + assert expected_parameter_list_hello in content diff --git a/tests/test_domain_cpp.py b/tests/test_domain_cpp.py new file mode 100644 index 0000000..dcc2b0f --- /dev/null +++ b/tests/test_domain_cpp.py @@ -0,0 +1,1744 @@ +"""Tests the C++ Domain""" + +import itertools +import re +import zlib + +import pytest + +import sphinx.domains.cpp +from sphinx import addnodes +from sphinx.addnodes import ( + desc, + desc_content, + desc_name, + desc_parameter, + desc_parameterlist, + desc_sig_name, + desc_sig_space, + desc_signature, + desc_signature_line, + pending_xref, +) +from sphinx.domains.cpp import ( + DefinitionError, + DefinitionParser, + NoOldIdError, + Symbol, + _id_prefix, + _max_id, +) +from sphinx.ext.intersphinx import load_mappings, normalize_intersphinx_mapping +from sphinx.testing import restructuredtext +from sphinx.testing.util import assert_node +from sphinx.writers.text import STDINDENT + + +def parse(name, string): + class Config: + 
cpp_id_attributes = ["id_attr"] + cpp_paren_attributes = ["paren_attr"] + parser = DefinitionParser(string, location=None, config=Config()) + parser.allowFallbackExpressionParsing = False + ast = parser.parse_declaration(name, name) + parser.assert_end() + # The scopedness would usually have been set by CPPEnumObject + if name == "enum": + ast.scoped = None # simulate unscoped enum + return ast + + +def _check(name, input, idDict, output, key, asTextOutput): + if key is None: + key = name + key += ' ' + if name in ('function', 'member'): + inputActual = input + outputAst = output + outputAsText = output + else: + inputActual = input.format(key='') + outputAst = output.format(key='') + outputAsText = output.format(key=key) + if asTextOutput is not None: + outputAsText = asTextOutput + + # first a simple check of the AST + ast = parse(name, inputActual) + res = str(ast) + if res != outputAst: + print("") + print("Input: ", input) + print("Result: ", res) + print("Expected: ", outputAst) + raise DefinitionError + rootSymbol = Symbol(None, None, None, None, None, None, None) + symbol = rootSymbol.add_declaration(ast, docname="TestDoc", line=42) + parentNode = addnodes.desc() + signode = addnodes.desc_signature(input, '') + parentNode += signode + ast.describe_signature(signode, 'lastIsName', symbol, options={}) + resAsText = parentNode.astext() + if resAsText != outputAsText: + print("") + print("Input: ", input) + print("astext(): ", resAsText) + print("Expected: ", outputAsText) + print("Node:", parentNode) + raise DefinitionError + + idExpected = [None] + for i in range(1, _max_id + 1): + if i in idDict: + idExpected.append(idDict[i]) + else: + idExpected.append(idExpected[i - 1]) + idActual = [None] + for i in range(1, _max_id + 1): + try: + id = ast.get_id(version=i) + assert id is not None + idActual.append(id[len(_id_prefix[i]):]) + except NoOldIdError: + idActual.append(None) + + res = [True] + for i in range(1, _max_id + 1): + res.append(idExpected[i] == idActual[i]) + + if not all(res): + print("input: %s" % input.rjust(20)) + for i in range(1, _max_id + 1): + if res[i]: + continue + print("Error in id version %d." 
% i) + print("result: %s" % idActual[i]) + print("expected: %s" % idExpected[i]) + print(rootSymbol.dump(0)) + raise DefinitionError + + +def check(name, input, idDict, output=None, key=None, asTextOutput=None): + if output is None: + output = input + # First, check without semicolon + _check(name, input, idDict, output, key, asTextOutput) + # Second, check with semicolon + _check(name, input + ' ;', idDict, output + ';', key, + asTextOutput + ';' if asTextOutput is not None else None) + + +@pytest.mark.parametrize(('type_', 'id_v2'), + sphinx.domains.cpp._id_fundamental_v2.items()) +def test_domain_cpp_ast_fundamental_types(type_, id_v2): + # see https://en.cppreference.com/w/cpp/language/types + def make_id_v1(): + if type_ == 'decltype(auto)': + return None + id_ = type_.replace(" ", "-").replace("long", "l") + if "__int" not in type_: + id_ = id_.replace("int", "i") + id_ = id_.replace("bool", "b").replace("char", "c") + id_ = id_.replace("wc_t", "wchar_t").replace("c16_t", "char16_t") + id_ = id_.replace("c8_t", "char8_t") + id_ = id_.replace("c32_t", "char32_t") + return f"f__{id_}" + + def make_id_v2(): + id_ = id_v2 + if type_ == "std::nullptr_t": + id_ = "NSt9nullptr_tE" + return f"1f{id_}" + + id1 = make_id_v1() + id2 = make_id_v2() + + input = f"void f({type_.replace(' ', ' ')} arg)" + output = f"void f({type_} arg)" + + check("function", input, {1: id1, 2: id2}, output=output) + if ' ' in type_: + # try permutations of all components + tcs = type_.split() + for p in itertools.permutations(tcs): + input = f"void f({' '.join(p)} arg)" + check("function", input, {1: id1, 2: id2}) + + +def test_domain_cpp_ast_expressions(): + def exprCheck(expr, id, id4=None): + ids = 'IE1CIA%s_1aE' + # call .format() on the expr to unescape double curly braces + idDict = {2: ids % expr.format(), 3: ids % id} + if id4 is not None: + idDict[4] = ids % id4 + check('class', 'template<> {key}C<a[%s]>' % expr, idDict) + + class Config: + cpp_id_attributes = ["id_attr"] + cpp_paren_attributes = ["paren_attr"] + + parser = DefinitionParser(expr, location=None, + config=Config()) + parser.allowFallbackExpressionParsing = False + ast = parser.parse_expression() + res = str(ast) + if res != expr: + print("") + print("Input: ", expr) + print("Result: ", res) + raise DefinitionError + displayString = ast.get_display_string() + if res != displayString: + # note: if the expression contains an anon name then this will trigger a falsely + print("") + print("Input: ", expr) + print("Result: ", res) + print("Display: ", displayString) + raise DefinitionError + + # primary + exprCheck('nullptr', 'LDnE') + exprCheck('true', 'L1E') + exprCheck('false', 'L0E') + ints = ['5', '0', '075', '0x0123456789ABCDEF', '0XF', '0b1', '0B1', + "0b0'1'0", "00'1'2", "0x0'1'2", "1'2'3"] + unsignedSuffix = ['', 'u', 'U'] + longSuffix = ['', 'l', 'L', 'll', 'LL'] + for i in ints: + for u in unsignedSuffix: + for l in longSuffix: + expr = i + u + l + exprCheck(expr, 'L' + expr.replace("'", "") + 'E') + expr = i + l + u + exprCheck(expr, 'L' + expr.replace("'", "") + 'E') + decimalFloats = ['5e42', '5e+42', '5e-42', + '5.', '5.e42', '5.e+42', '5.e-42', + '.5', '.5e42', '.5e+42', '.5e-42', + '5.0', '5.0e42', '5.0e+42', '5.0e-42', + "1'2'3e7'8'9", "1'2'3.e7'8'9", + ".4'5'6e7'8'9", "1'2'3.4'5'6e7'8'9"] + hexFloats = ['ApF', 'Ap+F', 'Ap-F', + 'A.', 'A.pF', 'A.p+F', 'A.p-F', + '.A', '.ApF', '.Ap+F', '.Ap-F', + 'A.B', 'A.BpF', 'A.Bp+F', 'A.Bp-F', + "A'B'Cp1'2'3", "A'B'C.p1'2'3", + ".D'E'Fp1'2'3", "A'B'C.D'E'Fp1'2'3"] + for suffix in ['', 'f', 
'F', 'l', 'L']: + for e in decimalFloats: + expr = e + suffix + exprCheck(expr, 'L' + expr.replace("'", "") + 'E') + for e in hexFloats: + expr = "0x" + e + suffix + exprCheck(expr, 'L' + expr.replace("'", "") + 'E') + exprCheck('"abc\\"cba"', 'LA8_KcE') # string + exprCheck('this', 'fpT') + # character literals + charPrefixAndIds = [('', 'c'), ('u8', 'c'), ('u', 'Ds'), ('U', 'Di'), ('L', 'w')] + chars = [('a', '97'), ('\\n', '10'), ('\\012', '10'), ('\\0', '0'), + ('\\x0a', '10'), ('\\x0A', '10'), ('\\u0a42', '2626'), ('\\u0A42', '2626'), + ('\\U0001f34c', '127820'), ('\\U0001F34C', '127820')] + for p, t in charPrefixAndIds: + for c, val in chars: + exprCheck(f"{p}'{c}'", t + val) + # user-defined literals + for i in ints: + exprCheck(i + '_udl', 'clL_Zli4_udlEL' + i.replace("'", "") + 'EE') + exprCheck(i + 'uludl', 'clL_Zli5uludlEL' + i.replace("'", "") + 'EE') + for f in decimalFloats: + exprCheck(f + '_udl', 'clL_Zli4_udlEL' + f.replace("'", "") + 'EE') + exprCheck(f + 'fudl', 'clL_Zli4fudlEL' + f.replace("'", "") + 'EE') + for f in hexFloats: + exprCheck('0x' + f + '_udl', 'clL_Zli4_udlEL0x' + f.replace("'", "") + 'EE') + for p, t in charPrefixAndIds: + for c, val in chars: + exprCheck(f"{p}'{c}'_udl", 'clL_Zli4_udlE' + t + val + 'E') + exprCheck('"abc"_udl', 'clL_Zli4_udlELA3_KcEE') + # from issue #7294 + exprCheck('6.62607015e-34q_J', 'clL_Zli3q_JEL6.62607015e-34EE') + + # fold expressions, paren, name + exprCheck('(... + Ns)', '(... + Ns)', id4='flpl2Ns') + exprCheck('(Ns + ...)', '(Ns + ...)', id4='frpl2Ns') + exprCheck('(Ns + ... + 0)', '(Ns + ... + 0)', id4='fLpl2NsL0E') + exprCheck('(5)', 'L5E') + exprCheck('C', '1C') + # postfix + exprCheck('A(2)', 'cl1AL2EE') + exprCheck('A[2]', 'ix1AL2E') + exprCheck('a.b.c', 'dtdt1a1b1c') + exprCheck('a->b->c', 'ptpt1a1b1c') + exprCheck('i++', 'pp1i') + exprCheck('i--', 'mm1i') + exprCheck('dynamic_cast<T&>(i)++', 'ppdcR1T1i') + exprCheck('static_cast<T&>(i)++', 'ppscR1T1i') + exprCheck('reinterpret_cast<T&>(i)++', 'pprcR1T1i') + exprCheck('const_cast<T&>(i)++', 'ppccR1T1i') + exprCheck('typeid(T).name', 'dtti1T4name') + exprCheck('typeid(a + b).name', 'dttepl1a1b4name') + # unary + exprCheck('++5', 'pp_L5E') + exprCheck('--5', 'mm_L5E') + exprCheck('*5', 'deL5E') + exprCheck('&5', 'adL5E') + exprCheck('+5', 'psL5E') + exprCheck('-5', 'ngL5E') + exprCheck('!5', 'ntL5E') + exprCheck('not 5', 'ntL5E') + exprCheck('~5', 'coL5E') + exprCheck('compl 5', 'coL5E') + exprCheck('sizeof...(a)', 'sZ1a') + exprCheck('sizeof(T)', 'st1T') + exprCheck('sizeof -42', 'szngL42E') + exprCheck('alignof(T)', 'at1T') + exprCheck('noexcept(-42)', 'nxngL42E') + # new-expression + exprCheck('new int', 'nw_iE') + exprCheck('new volatile int', 'nw_ViE') + exprCheck('new int[42]', 'nw_AL42E_iE') + exprCheck('new int()', 'nw_ipiE') + exprCheck('new int(5, 42)', 'nw_ipiL5EL42EE') + exprCheck('::new int', 'nw_iE') + exprCheck('new int{{}}', 'nw_iilE') + exprCheck('new int{{5, 42}}', 'nw_iilL5EL42EE') + # delete-expression + exprCheck('delete p', 'dl1p') + exprCheck('delete [] p', 'da1p') + exprCheck('::delete p', 'dl1p') + exprCheck('::delete [] p', 'da1p') + # cast + exprCheck('(int)2', 'cviL2E') + # binary op + exprCheck('5 || 42', 'ooL5EL42E') + exprCheck('5 or 42', 'ooL5EL42E') + exprCheck('5 && 42', 'aaL5EL42E') + exprCheck('5 and 42', 'aaL5EL42E') + exprCheck('5 | 42', 'orL5EL42E') + exprCheck('5 bitor 42', 'orL5EL42E') + exprCheck('5 ^ 42', 'eoL5EL42E') + exprCheck('5 xor 42', 'eoL5EL42E') + exprCheck('5 & 42', 'anL5EL42E') + exprCheck('5 bitand 42', 'anL5EL42E') 
+ # ['==', '!='] + exprCheck('5 == 42', 'eqL5EL42E') + exprCheck('5 != 42', 'neL5EL42E') + exprCheck('5 not_eq 42', 'neL5EL42E') + # ['<=', '>=', '<', '>', '<=>'] + exprCheck('5 <= 42', 'leL5EL42E') + exprCheck('A <= 42', 'le1AL42E') + exprCheck('5 >= 42', 'geL5EL42E') + exprCheck('5 < 42', 'ltL5EL42E') + exprCheck('A < 42', 'lt1AL42E') + exprCheck('5 > 42', 'gtL5EL42E') + exprCheck('A > 42', 'gt1AL42E') + exprCheck('5 <=> 42', 'ssL5EL42E') + exprCheck('A <=> 42', 'ss1AL42E') + # ['<<', '>>'] + exprCheck('5 << 42', 'lsL5EL42E') + exprCheck('A << 42', 'ls1AL42E') + exprCheck('5 >> 42', 'rsL5EL42E') + # ['+', '-'] + exprCheck('5 + 42', 'plL5EL42E') + exprCheck('5 - 42', 'miL5EL42E') + # ['*', '/', '%'] + exprCheck('5 * 42', 'mlL5EL42E') + exprCheck('5 / 42', 'dvL5EL42E') + exprCheck('5 % 42', 'rmL5EL42E') + # ['.*', '->*'] + exprCheck('5 .* 42', 'dsL5EL42E') + exprCheck('5 ->* 42', 'pmL5EL42E') + # conditional + exprCheck('5 ? 7 : 3', 'quL5EL7EL3E') + # assignment + exprCheck('a = 5', 'aS1aL5E') + exprCheck('a *= 5', 'mL1aL5E') + exprCheck('a /= 5', 'dV1aL5E') + exprCheck('a %= 5', 'rM1aL5E') + exprCheck('a += 5', 'pL1aL5E') + exprCheck('a -= 5', 'mI1aL5E') + exprCheck('a >>= 5', 'rS1aL5E') + exprCheck('a <<= 5', 'lS1aL5E') + exprCheck('a &= 5', 'aN1aL5E') + exprCheck('a and_eq 5', 'aN1aL5E') + exprCheck('a ^= 5', 'eO1aL5E') + exprCheck('a xor_eq 5', 'eO1aL5E') + exprCheck('a |= 5', 'oR1aL5E') + exprCheck('a or_eq 5', 'oR1aL5E') + exprCheck('a = {{1, 2, 3}}', 'aS1ailL1EL2EL3EE') + # complex assignment and conditional + exprCheck('5 = 6 = 7', 'aSL5EaSL6EL7E') + exprCheck('5 = 6 ? 7 = 8 : 3', 'aSL5EquL6EaSL7EL8EL3E') + # comma operator + exprCheck('a, 5', 'cm1aL5E') + + # Additional tests + # a < expression that starts with something that could be a template + exprCheck('A < 42', 'lt1AL42E') + check('function', 'template<> void f(A<B, 2> &v)', + {2: "IE1fR1AI1BX2EE", 3: "IE1fR1AI1BXL2EEE", 4: "IE1fvR1AI1BXL2EEE"}) + exprCheck('A<1>::value', 'N1AIXL1EEE5valueE') + check('class', "template<int T = 42> {key}A", {2: "I_iE1A"}) + check('enumerator', '{key}A = std::numeric_limits<unsigned long>::max()', {2: "1A"}) + + exprCheck('operator()()', 'clclE') + exprCheck('operator()<int>()', 'clclIiEE') + + # pack expansion + exprCheck('a(b(c, 1 + d...)..., e(f..., g))', 'cl1aspcl1b1cspplL1E1dEcl1esp1f1gEE') + + +def test_domain_cpp_ast_type_definitions(): + check("type", "public bool b", {1: "b", 2: "1b"}, "{key}bool b", key='typedef') + check("type", "{key}bool A::b", {1: "A::b", 2: "N1A1bE"}, key='typedef') + check("type", "{key}bool *b", {1: "b", 2: "1b"}, key='typedef') + check("type", "{key}bool *const b", {1: "b", 2: "1b"}, key='typedef') + check("type", "{key}bool *volatile const b", {1: "b", 2: "1b"}, key='typedef') + check("type", "{key}bool *volatile const b", {1: "b", 2: "1b"}, key='typedef') + check("type", "{key}bool *volatile const *b", {1: "b", 2: "1b"}, key='typedef') + check("type", "{key}bool &b", {1: "b", 2: "1b"}, key='typedef') + check("type", "{key}bool b[]", {1: "b", 2: "1b"}, key='typedef') + check("type", "{key}std::pair<int, int> coord", {1: "coord", 2: "5coord"}, key='typedef') + check("type", "{key}long long int foo", {1: "foo", 2: "3foo"}, key='typedef') + check("type", '{key}std::vector<std::pair<std::string, long long>> module::blah', + {1: "module::blah", 2: "N6module4blahE"}, key='typedef') + check("type", "{key}std::function<void()> F", {1: "F", 2: "1F"}, key='typedef') + check("type", "{key}std::function<R(A1, A2)> F", {1: "F", 2: "1F"}, key='typedef') + check("type", 
"{key}std::function<R(A1, A2, A3)> F", {1: "F", 2: "1F"}, key='typedef') + check("type", "{key}std::function<R(A1, A2, A3, As...)> F", {1: "F", 2: "1F"}, key='typedef') + check("type", "{key}MyContainer::const_iterator", + {1: "MyContainer::const_iterator", 2: "N11MyContainer14const_iteratorE"}) + check("type", + "public MyContainer::const_iterator", + {1: "MyContainer::const_iterator", 2: "N11MyContainer14const_iteratorE"}, + output="{key}MyContainer::const_iterator") + # test decl specs on right + check("type", "{key}bool const b", {1: "b", 2: "1b"}, key='typedef') + # test name in global scope + check("type", "{key}bool ::B::b", {1: "B::b", 2: "N1B1bE"}, key='typedef') + + check('type', '{key}A = B', {2: '1A'}, key='using') + check('type', '{key}A = decltype(b)', {2: '1A'}, key='using') + + # from breathe#267 (named function parameters for function pointers + check('type', '{key}void (*gpio_callback_t)(struct device *port, uint32_t pin)', + {1: 'gpio_callback_t', 2: '15gpio_callback_t'}, key='typedef') + check('type', '{key}void (*f)(std::function<void(int i)> g)', {1: 'f', 2: '1f'}, + key='typedef') + + check('type', '{key}T = A::template B<int>::template C<double>', {2: '1T'}, key='using') + + check('type', '{key}T = Q<A::operator()>', {2: '1T'}, key='using') + check('type', '{key}T = Q<A::operator()<int>>', {2: '1T'}, key='using') + check('type', '{key}T = Q<A::operator bool>', {2: '1T'}, key='using') + + +def test_domain_cpp_ast_concept_definitions(): + check('concept', 'template<typename Param> {key}A::B::Concept', + {2: 'I0EN1A1B7ConceptE'}) + check('concept', 'template<typename A, typename B, typename ...C> {key}Foo', + {2: 'I00DpE3Foo'}) + with pytest.raises(DefinitionError): + parse('concept', '{key}Foo') + with pytest.raises(DefinitionError): + parse('concept', 'template<typename T> template<typename U> {key}Foo') + + +def test_domain_cpp_ast_member_definitions(): + check('member', ' const std::string & name = 42', + {1: "name__ssCR", 2: "4name"}, output='const std::string &name = 42') + check('member', ' const std::string & name', {1: "name__ssCR", 2: "4name"}, + output='const std::string &name') + check('member', ' const std::string & name [ n ]', + {1: "name__ssCRA", 2: "4name"}, output='const std::string &name[n]') + check('member', 'const std::vector< unsigned int, long> &name', + {1: "name__std::vector:unsigned-i.l:CR", 2: "4name"}, + output='const std::vector<unsigned int, long> &name') + check('member', 'module::myclass foo[n]', {1: "foo__module::myclassA", 2: "3foo"}) + check('member', 'int *const p', {1: 'p__iPC', 2: '1p'}) + check('member', 'extern int myInt', {1: 'myInt__i', 2: '5myInt'}) + check('member', 'thread_local int myInt', {1: 'myInt__i', 2: '5myInt'}) + check('member', 'extern thread_local int myInt', {1: 'myInt__i', 2: '5myInt'}) + check('member', 'thread_local extern int myInt', {1: 'myInt__i', 2: '5myInt'}, + 'extern thread_local int myInt') + + # tests based on https://en.cppreference.com/w/cpp/language/bit_field + check('member', 'int b : 3', {1: 'b__i', 2: '1b'}) + check('member', 'int b : 8 = 42', {1: 'b__i', 2: '1b'}) + check('member', 'int b : 8{42}', {1: 'b__i', 2: '1b'}) + # TODO: enable once the ternary operator is supported + # check('member', 'int b : true ? 8 : a = 42', {1: 'b__i', 2: '1b'}) + # TODO: enable once the ternary operator is supported + # check('member', 'int b : (true ? 
8 : a) = 42', {1: 'b__i', 2: '1b'}) + check('member', 'int b : 1 || new int{0}', {1: 'b__i', 2: '1b'}) + + check('member', 'inline int n', {1: 'n__i', 2: '1n'}) + check('member', 'constinit int n', {1: 'n__i', 2: '1n'}) + + +def test_domain_cpp_ast_function_definitions(): + check('function', 'void f(volatile int)', {1: "f__iV", 2: "1fVi"}) + check('function', 'void f(std::size_t)', {1: "f__std::s", 2: "1fNSt6size_tE"}) + check('function', 'operator bool() const', {1: "castto-b-operatorC", 2: "NKcvbEv"}) + check('function', 'A::operator bool() const', + {1: "A::castto-b-operatorC", 2: "NK1AcvbEv"}) + check('function', 'A::operator bool() volatile const &', + {1: "A::castto-b-operatorVCR", 2: "NVKR1AcvbEv"}) + check('function', 'A::operator bool() volatile const &&', + {1: "A::castto-b-operatorVCO", 2: "NVKO1AcvbEv"}) + check('function', 'bool namespaced::theclass::method(arg1, arg2)', + {1: "namespaced::theclass::method__arg1.arg2", + 2: "N10namespaced8theclass6methodE4arg14arg2"}) + x = 'std::vector<std::pair<std::string, int>> &module::test(register int ' \ + 'foo, bar, std::string baz = "foobar, blah, bleh") const = 0' + check('function', x, {1: "module::test__i.bar.ssC", + 2: "NK6module4testEi3barNSt6stringE"}) + check('function', 'void f(std::pair<A, B>)', + {1: "f__std::pair:A.B:", 2: "1fNSt4pairI1A1BEE"}) + check('function', 'explicit module::myclass::foo::foo()', + {1: "module::myclass::foo::foo", 2: "N6module7myclass3foo3fooEv"}) + check('function', 'module::myclass::foo::~foo()', + {1: "module::myclass::foo::~foo", 2: "N6module7myclass3fooD0Ev"}) + check('function', 'int printf(const char *fmt, ...)', + {1: "printf__cCP.z", 2: "6printfPKcz"}) + check('function', 'int foo(const unsigned int j)', + {1: "foo__unsigned-iC", 2: "3fooKj"}) + check('function', 'int foo(const int *const ptr)', + {1: "foo__iCPC", 2: "3fooPCKi"}) + check('function', 'module::myclass::operator std::vector<std::string>()', + {1: "module::myclass::castto-std::vector:ss:-operator", + 2: "N6module7myclasscvNSt6vectorINSt6stringEEEEv"}) + check('function', + 'void operator()(const boost::array<VertexID, 2> &v) const', + {1: "call-operator__boost::array:VertexID.2:CRC", + 2: "NKclERKN5boost5arrayI8VertexIDX2EEE", + 3: "NKclERKN5boost5arrayI8VertexIDXL2EEEE"}) + check('function', + 'void operator()(const boost::array<VertexID, 2, "foo, bar"> &v) const', + {1: 'call-operator__boost::array:VertexID.2."foo,--bar":CRC', + 2: 'NKclERKN5boost5arrayI8VertexIDX2EX"foo, bar"EEE', + 3: 'NKclERKN5boost5arrayI8VertexIDXL2EEXLA9_KcEEEE'}) + check('function', 'MyClass::MyClass(MyClass::MyClass&&)', + {1: "MyClass::MyClass__MyClass::MyClassRR", + 2: "N7MyClass7MyClassERRN7MyClass7MyClassE"}) + check('function', 'constexpr int get_value()', {1: "get_valueCE", 2: "9get_valuev"}) + check('function', 'static constexpr int get_value()', + {1: "get_valueCE", 2: "9get_valuev"}) + check('function', 'int get_value() const noexcept', + {1: "get_valueC", 2: "NK9get_valueEv"}) + check('function', 'int get_value() const noexcept(std::is_nothrow_move_constructible<T>::value)', + {1: "get_valueC", 2: "NK9get_valueEv"}) + check('function', 'int get_value() const noexcept("see below")', + {1: "get_valueC", 2: "NK9get_valueEv"}) + check('function', 'int get_value() const noexcept = delete', + {1: "get_valueC", 2: "NK9get_valueEv"}) + check('function', 'int get_value() volatile const', + {1: "get_valueVC", 2: "NVK9get_valueEv"}) + check('function', 'MyClass::MyClass(MyClass::MyClass&&) = default', + {1: "MyClass::MyClass__MyClass::MyClassRR", + 2: 
"N7MyClass7MyClassERRN7MyClass7MyClassE"}) + check('function', 'virtual MyClass::a_virtual_function() const override', + {1: "MyClass::a_virtual_functionC", 2: "NK7MyClass18a_virtual_functionEv"}) + check('function', 'A B() override', {1: "B", 2: "1Bv"}) + check('function', 'A B() final', {1: "B", 2: "1Bv"}) + check('function', 'A B() final override', {1: "B", 2: "1Bv"}) + check('function', 'A B() override final', {1: "B", 2: "1Bv"}, + output='A B() final override') + check('function', 'MyClass::a_member_function() volatile', + {1: "MyClass::a_member_functionV", 2: "NV7MyClass17a_member_functionEv"}) + check('function', 'MyClass::a_member_function() volatile const', + {1: "MyClass::a_member_functionVC", 2: "NVK7MyClass17a_member_functionEv"}) + check('function', 'MyClass::a_member_function() &&', + {1: "MyClass::a_member_functionO", 2: "NO7MyClass17a_member_functionEv"}) + check('function', 'MyClass::a_member_function() &', + {1: "MyClass::a_member_functionR", 2: "NR7MyClass17a_member_functionEv"}) + check('function', 'MyClass::a_member_function() const &', + {1: "MyClass::a_member_functionCR", 2: "NKR7MyClass17a_member_functionEv"}) + check('function', 'int main(int argc, char *argv[])', + {1: "main__i.cPA", 2: "4mainiA_Pc"}) + check('function', 'MyClass &MyClass::operator++()', + {1: "MyClass::inc-operator", 2: "N7MyClassppEv"}) + check('function', 'MyClass::pointer MyClass::operator->()', + {1: "MyClass::pointer-operator", 2: "N7MyClassptEv"}) + + x = 'std::vector<std::pair<std::string, int>> &module::test(register int ' \ + 'foo, bar[n], std::string baz = "foobar, blah, bleh") const = 0' + check('function', x, {1: "module::test__i.barA.ssC", + 2: "NK6module4testEiAn_3barNSt6stringE", + 3: "NK6module4testEiA1n_3barNSt6stringE"}) + check('function', + 'int foo(Foo f = Foo(double(), std::make_pair(int(2), double(3.4))))', + {1: "foo__Foo", 2: "3foo3Foo"}) + check('function', 'int foo(A a = x(a))', {1: "foo__A", 2: "3foo1A"}) + with pytest.raises(DefinitionError): + parse('function', 'int foo(B b=x(a)') + with pytest.raises(DefinitionError): + parse('function', 'int foo)C c=x(a))') + with pytest.raises(DefinitionError): + parse('function', 'int foo(D d=x(a') + check('function', 'int foo(const A&... a)', {1: "foo__ACRDp", 2: "3fooDpRK1A"}) + check('function', 'int foo(const A&...)', {1: "foo__ACRDp", 2: "3fooDpRK1A"}) + check('function', 'int foo(const A*... a)', {1: "foo__ACPDp", 2: "3fooDpPK1A"}) + check('function', 'int foo(const A*...)', {1: "foo__ACPDp", 2: "3fooDpPK1A"}) + check('function', 'int foo(const int A::*... 
a)', {2: "3fooDpM1AKi"}) + check('function', 'int foo(const int A::*...)', {2: "3fooDpM1AKi"}) + # check('function', 'int foo(int (*a)(A)...)', {1: "foo__ACRDp", 2: "3fooDpPK1A"}) + # check('function', 'int foo(int (*)(A)...)', {1: "foo__ACRDp", 2: "3fooDpPK1A"}) + check('function', 'virtual void f()', {1: "f", 2: "1fv"}) + # test for ::nestedName, from issue 1738 + check("function", "result(int val, ::std::error_category const &cat)", + {1: "result__i.std::error_categoryCR", 2: "6resultiRKNSt14error_categoryE"}) + check("function", "int *f()", {1: "f", 2: "1fv"}) + # tests derived from issue #1753 (skip to keep sanity) + check("function", "f(int (&array)[10])", {2: "1fRA10_i", 3: "1fRAL10E_i"}) + check("function", "void f(int (&array)[10])", {2: "1fRA10_i", 3: "1fRAL10E_i"}) + check("function", "void f(float *q(double))", {2: "1fFPfdE"}) + check("function", "void f(float *(*q)(double))", {2: "1fPFPfdE"}) + check("function", "void f(float (*q)(double))", {2: "1fPFfdE"}) + check("function", "int (*f(double d))(float)", {1: "f__double", 2: "1fd"}) + check("function", "int (*f(bool b))[5]", {1: "f__b", 2: "1fb"}) + check("function", "int (*A::f(double d) const)(float)", + {1: "A::f__doubleC", 2: "NK1A1fEd"}) + check("function", "void f(std::shared_ptr<int(double)> ptr)", + {2: "1fNSt10shared_ptrIFidEEE"}) + check("function", "void f(int *const p)", {1: "f__iPC", 2: "1fPCi"}) + check("function", "void f(int *volatile const p)", {1: "f__iPVC", 2: "1fPVCi"}) + + check('function', 'extern int f()', {1: 'f', 2: '1fv'}) + check('function', 'consteval int f()', {1: 'f', 2: '1fv'}) + + check('function', 'explicit(true) void f()', {1: 'f', 2: '1fv'}) + + check('function', 'decltype(auto) f()', {1: 'f', 2: "1fv"}) + + # TODO: make tests for functions in a template, e.g., Test<int&&()> + # such that the id generation for function type types is correct. 
+ + check('function', 'friend std::ostream &f(std::ostream &s, int i)', + {1: 'f__osR.i', 2: '1fRNSt7ostreamEi'}) + + # from breathe#223 + check('function', 'void f(struct E e)', {1: 'f__E', 2: '1f1E'}) + check('function', 'void f(class E e)', {1: 'f__E', 2: '1f1E'}) + check('function', 'void f(typename E e)', {1: 'f__E', 2: '1f1E'}) + check('function', 'void f(enum E e)', {1: 'f__E', 2: '1f1E'}) + check('function', 'void f(union E e)', {1: 'f__E', 2: '1f1E'}) + + # pointer to member (function) + check('function', 'void f(int C::*)', {2: '1fM1Ci'}) + check('function', 'void f(int C::* p)', {2: '1fM1Ci'}) + check('function', 'void f(int ::C::* p)', {2: '1fM1Ci'}) + check('function', 'void f(int C::*const)', {2: '1fKM1Ci'}) + check('function', 'void f(int C::*const&)', {2: '1fRKM1Ci'}) + check('function', 'void f(int C::*volatile)', {2: '1fVM1Ci'}) + check('function', 'void f(int C::*const volatile)', {2: '1fVKM1Ci'}, + output='void f(int C::*volatile const)') + check('function', 'void f(int C::*volatile const)', {2: '1fVKM1Ci'}) + check('function', 'void f(int (C::*)(float, double))', {2: '1fM1CFifdE'}) + check('function', 'void f(int (C::* p)(float, double))', {2: '1fM1CFifdE'}) + check('function', 'void f(int (::C::* p)(float, double))', {2: '1fM1CFifdE'}) + check('function', 'void f(void (C::*)() const &)', {2: '1fM1CKRFvvE'}) + check('function', 'int C::* f(int, double)', {2: '1fid'}) + check('function', 'void f(int C::* *p)', {2: '1fPM1Ci'}) + check('function', 'void f(int C::**)', {2: '1fPM1Ci'}) + check('function', 'void f(int C::*const *p)', {2: '1fPKM1Ci'}) + check('function', 'void f(int C::*const*)', {2: '1fPKM1Ci'}) + + # exceptions from return type mangling + check('function', 'template<typename T> C()', {2: 'I0E1Cv'}) + check('function', 'template<typename T> operator int()', {2: 'I0Ecviv'}) + + # trailing return types + ids = {1: 'f', 2: '1fv'} + check('function', 'int f()', ids) + check('function', 'auto f() -> int', ids) + check('function', 'virtual auto f() -> int = 0', ids) + check('function', 'virtual auto f() -> int final', ids) + check('function', 'virtual auto f() -> int override', ids) + + ids = {2: 'I0E1fv', 4: 'I0E1fiv'} + check('function', 'template<typename T> int f()', ids) + check('function', 'template<typename T> f() -> int', ids) + + # from breathe#441 + check('function', 'auto MakeThingy() -> Thingy*', {1: 'MakeThingy', 2: '10MakeThingyv'}) + + # from #8960 + check('function', 'void f(void (*p)(int, double), int i)', {2: '1fPFvidEi'}) + + # from #9535 comment + check('function', 'void f(void (*p)(int) = &foo)', {2: '1fPFviE'}) + + +def test_domain_cpp_ast_operators(): + check('function', 'void operator new()', {1: "new-operator", 2: "nwv"}) + check('function', 'void operator new[]()', {1: "new-array-operator", 2: "nav"}) + check('function', 'void operator delete()', {1: "delete-operator", 2: "dlv"}) + check('function', 'void operator delete[]()', {1: "delete-array-operator", 2: "dav"}) + check('function', 'operator bool() const', {1: "castto-b-operatorC", 2: "NKcvbEv"}) + check('function', 'void operator""_udl()', {2: 'li4_udlv'}) + + check('function', 'void operator~()', {1: "inv-operator", 2: "cov"}) + check('function', 'void operator compl()', {2: "cov"}) + check('function', 'void operator+()', {1: "add-operator", 2: "plv"}) + check('function', 'void operator-()', {1: "sub-operator", 2: "miv"}) + check('function', 'void operator*()', {1: "mul-operator", 2: "mlv"}) + check('function', 'void operator/()', {1: "div-operator", 2: "dvv"}) + check('function', 
'void operator%()', {1: "mod-operator", 2: "rmv"}) + check('function', 'void operator&()', {1: "and-operator", 2: "anv"}) + check('function', 'void operator bitand()', {2: "anv"}) + check('function', 'void operator|()', {1: "or-operator", 2: "orv"}) + check('function', 'void operator bitor()', {2: "orv"}) + check('function', 'void operator^()', {1: "xor-operator", 2: "eov"}) + check('function', 'void operator xor()', {2: "eov"}) + check('function', 'void operator=()', {1: "assign-operator", 2: "aSv"}) + check('function', 'void operator+=()', {1: "add-assign-operator", 2: "pLv"}) + check('function', 'void operator-=()', {1: "sub-assign-operator", 2: "mIv"}) + check('function', 'void operator*=()', {1: "mul-assign-operator", 2: "mLv"}) + check('function', 'void operator/=()', {1: "div-assign-operator", 2: "dVv"}) + check('function', 'void operator%=()', {1: "mod-assign-operator", 2: "rMv"}) + check('function', 'void operator&=()', {1: "and-assign-operator", 2: "aNv"}) + check('function', 'void operator and_eq()', {2: "aNv"}) + check('function', 'void operator|=()', {1: "or-assign-operator", 2: "oRv"}) + check('function', 'void operator or_eq()', {2: "oRv"}) + check('function', 'void operator^=()', {1: "xor-assign-operator", 2: "eOv"}) + check('function', 'void operator xor_eq()', {2: "eOv"}) + check('function', 'void operator<<()', {1: "lshift-operator", 2: "lsv"}) + check('function', 'void operator>>()', {1: "rshift-operator", 2: "rsv"}) + check('function', 'void operator<<=()', {1: "lshift-assign-operator", 2: "lSv"}) + check('function', 'void operator>>=()', {1: "rshift-assign-operator", 2: "rSv"}) + check('function', 'void operator==()', {1: "eq-operator", 2: "eqv"}) + check('function', 'void operator!=()', {1: "neq-operator", 2: "nev"}) + check('function', 'void operator not_eq()', {2: "nev"}) + check('function', 'void operator<()', {1: "lt-operator", 2: "ltv"}) + check('function', 'void operator>()', {1: "gt-operator", 2: "gtv"}) + check('function', 'void operator<=()', {1: "lte-operator", 2: "lev"}) + check('function', 'void operator>=()', {1: "gte-operator", 2: "gev"}) + check('function', 'void operator<=>()', {2: "ssv"}) + check('function', 'void operator!()', {1: "not-operator", 2: "ntv"}) + check('function', 'void operator not()', {2: "ntv"}) + check('function', 'void operator&&()', {1: "sand-operator", 2: "aav"}) + check('function', 'void operator and()', {2: "aav"}) + check('function', 'void operator||()', {1: "sor-operator", 2: "oov"}) + check('function', 'void operator or()', {2: "oov"}) + check('function', 'void operator++()', {1: "inc-operator", 2: "ppv"}) + check('function', 'void operator--()', {1: "dec-operator", 2: "mmv"}) + check('function', 'void operator,()', {1: "comma-operator", 2: "cmv"}) + check('function', 'void operator->*()', {1: "pointer-by-pointer-operator", 2: "pmv"}) + check('function', 'void operator->()', {1: "pointer-operator", 2: "ptv"}) + check('function', 'void operator()()', {1: "call-operator", 2: "clv"}) + check('function', 'void operator[]()', {1: "subscript-operator", 2: "ixv"}) + + +def test_domain_cpp_ast_nested_name(): + check('class', '{key}::A', {1: "A", 2: "1A"}) + check('class', '{key}::A::B', {1: "A::B", 2: "N1A1BE"}) + check('function', 'void f(::A a)', {1: "f__A", 2: "1f1A"}) + check('function', 'void f(::A::B a)', {1: "f__A::B", 2: "1fN1A1BE"}) + + +def test_domain_cpp_ast_class_definitions(): + check('class', 'public A', {1: "A", 2: "1A"}, output='{key}A') + check('class', 'private {key}A', {1: "A", 2: "1A"}) + check('class', '{key}A 
final', {1: 'A', 2: '1A'}) + + # test bases + check('class', '{key}A', {1: "A", 2: "1A"}) + check('class', '{key}A::B::C', {1: "A::B::C", 2: "N1A1B1CE"}) + check('class', '{key}A : B', {1: "A", 2: "1A"}) + check('class', '{key}A : private B', {1: "A", 2: "1A"}) + check('class', '{key}A : public B', {1: "A", 2: "1A"}) + check('class', '{key}A : B, C', {1: "A", 2: "1A"}) + check('class', '{key}A : B, protected C, D', {1: "A", 2: "1A"}) + check('class', 'A : virtual private B', {1: 'A', 2: '1A'}, output='{key}A : private virtual B') + check('class', '{key}A : private virtual B', {1: 'A', 2: '1A'}) + check('class', '{key}A : B, virtual C', {1: 'A', 2: '1A'}) + check('class', '{key}A : public virtual B', {1: 'A', 2: '1A'}) + check('class', '{key}A : B, C...', {1: 'A', 2: '1A'}) + check('class', '{key}A : B..., C', {1: 'A', 2: '1A'}) + + # from #4094 + check('class', 'template<class, class = std::void_t<>> {key}has_var', {2: 'I00E7has_var'}) + check('class', 'template<class T> {key}has_var<T, std::void_t<decltype(&T::var)>>', + {2: 'I0E7has_varI1TNSt6void_tIDTadN1T3varEEEEE'}) + + check('class', 'template<typename ...Ts> {key}T<int (*)(Ts)...>', + {2: 'IDpE1TIJPFi2TsEEE'}) + check('class', 'template<int... Is> {key}T<(Is)...>', + {2: 'I_DpiE1TIJX(Is)EEE', 3: 'I_DpiE1TIJX2IsEEE'}) + + +def test_domain_cpp_ast_union_definitions(): + check('union', '{key}A', {2: "1A"}) + + +def test_domain_cpp_ast_enum_definitions(): + check('enum', '{key}A', {2: "1A"}) + check('enum', '{key}A : std::underlying_type<B>::type', {2: "1A"}) + check('enum', '{key}A : unsigned int', {2: "1A"}) + check('enum', 'public A', {2: "1A"}, output='{key}A') + check('enum', 'private {key}A', {2: "1A"}) + + check('enumerator', '{key}A', {2: "1A"}) + check('enumerator', '{key}A = std::numeric_limits<unsigned long>::max()', {2: "1A"}) + + +def test_domain_cpp_ast_anon_definitions(): + check('class', '@a', {3: "Ut1_a"}, asTextOutput='class [anonymous]') + check('union', '@a', {3: "Ut1_a"}, asTextOutput='union [anonymous]') + check('enum', '@a', {3: "Ut1_a"}, asTextOutput='enum [anonymous]') + check('class', '@1', {3: "Ut1_1"}, asTextOutput='class [anonymous]') + check('class', '@a::A', {3: "NUt1_a1AE"}, asTextOutput='class [anonymous]::A') + + check('function', 'int f(int @a)', {1: 'f__i', 2: '1fi'}, + asTextOutput='int f(int [anonymous])') + + +def test_domain_cpp_ast_templates(): + check('class', "A<T>", {2: "IE1AI1TE"}, output="template<> {key}A<T>") + # first just check which objects support templating + check('class', "template<> {key}A", {2: "IE1A"}) + check('function', "template<> void A()", {2: "IE1Av", 4: "IE1Avv"}) + check('member', "template<> A a", {2: "IE1a"}) + check('type', "template<> {key}a = A", {2: "IE1a"}, key='using') + with pytest.raises(DefinitionError): + parse('enum', "template<> A") + with pytest.raises(DefinitionError): + parse('enumerator', "template<> A") + # then all the real tests + check('class', "template<typename T1, typename T2> {key}A", {2: "I00E1A"}) + check('type', "template<> {key}a", {2: "IE1a"}, key='using') + + check('class', "template<typename T> {key}A", {2: "I0E1A"}) + check('class', "template<class T> {key}A", {2: "I0E1A"}) + check('class', "template<typename ...T> {key}A", {2: "IDpE1A"}) + check('class', "template<typename...> {key}A", {2: "IDpE1A"}) + check('class', "template<typename = Test> {key}A", {2: "I0E1A"}) + check('class', "template<typename T = Test> {key}A", {2: "I0E1A"}) + + check('class', "template<template<typename> typename T> {key}A", {2: "II0E0E1A"}) + check('class', 
"template<template<typename> class T> {key}A", {2: "II0E0E1A"}) + check('class', "template<template<typename> typename> {key}A", {2: "II0E0E1A"}) + check('class', "template<template<typename> typename ...T> {key}A", {2: "II0EDpE1A"}) + check('class', "template<template<typename> typename...> {key}A", {2: "II0EDpE1A"}) + check('class', "template<typename T, template<typename> typename...> {key}A", {2: "I0I0EDpE1A"}) + + check('class', "template<int> {key}A", {2: "I_iE1A"}) + check('class', "template<int T> {key}A", {2: "I_iE1A"}) + check('class', "template<int... T> {key}A", {2: "I_DpiE1A"}) + check('class', "template<int T = 42> {key}A", {2: "I_iE1A"}) + check('class', "template<int = 42> {key}A", {2: "I_iE1A"}) + + check('class', "template<typename A<B>::C> {key}A", {2: "I_N1AI1BE1CEE1A"}) + check('class', "template<typename A<B>::C = 42> {key}A", {2: "I_N1AI1BE1CEE1A"}) + # from #7944 + check('function', "template<typename T, " + "typename std::enable_if<!has_overloaded_addressof<T>::value, bool>::type = false" + "> constexpr T *static_addressof(T &ref)", + {2: "I0_NSt9enable_ifIX!has_overloaded_addressof<T>::valueEbE4typeEE16static_addressofR1T", + 3: "I0_NSt9enable_ifIXntN24has_overloaded_addressofI1TE5valueEEbE4typeEE16static_addressofR1T", + 4: "I0_NSt9enable_ifIXntN24has_overloaded_addressofI1TE5valueEEbE4typeEE16static_addressofP1TR1T"}) + + check('class', "template<> {key}A<NS::B<>>", {2: "IE1AIN2NS1BIEEE"}) + + # from #2058 + check('function', + "template<typename Char, typename Traits> " + "inline std::basic_ostream<Char, Traits> &operator<<(" + "std::basic_ostream<Char, Traits> &os, " + "const c_string_view_base<const Char, Traits> &str)", + {2: "I00ElsRNSt13basic_ostreamI4Char6TraitsEE" + "RK18c_string_view_baseIK4Char6TraitsE", + 4: "I00Els" + "RNSt13basic_ostreamI4Char6TraitsEE" + "RNSt13basic_ostreamI4Char6TraitsEE" + "RK18c_string_view_baseIK4Char6TraitsE"}) + + # template introductions + with pytest.raises(DefinitionError): + parse('enum', 'abc::ns::foo{id_0, id_1, id_2} A') + with pytest.raises(DefinitionError): + parse('enumerator', 'abc::ns::foo{id_0, id_1, id_2} A') + check('class', 'abc::ns::foo{{id_0, id_1, id_2}} {key}xyz::bar', + {2: 'I000EXN3abc2ns3fooEI4id_04id_14id_2EEN3xyz3barE'}) + check('class', 'abc::ns::foo{{id_0, id_1, ...id_2}} {key}xyz::bar', + {2: 'I00DpEXN3abc2ns3fooEI4id_04id_1sp4id_2EEN3xyz3barE'}) + check('class', 'abc::ns::foo{{id_0, id_1, id_2}} {key}xyz::bar<id_0, id_1, id_2>', + {2: 'I000EXN3abc2ns3fooEI4id_04id_14id_2EEN3xyz3barE'}) + check('class', 'abc::ns::foo{{id_0, id_1, ...id_2}} {key}xyz::bar<id_0, id_1, id_2...>', + {2: 'I00DpEXN3abc2ns3fooEI4id_04id_1sp4id_2EEN3xyz3barE'}) + + check('class', 'template<> Concept{{U}} {key}A<int>::B', {2: 'IEI0EX7ConceptI1UEEN1AIiE1BE'}) + + check('type', 'abc::ns::foo{{id_0, id_1, id_2}} {key}xyz::bar = ghi::qux', + {2: 'I000EXN3abc2ns3fooEI4id_04id_14id_2EEN3xyz3barE'}, key='using') + check('type', 'abc::ns::foo{{id_0, id_1, ...id_2}} {key}xyz::bar = ghi::qux', + {2: 'I00DpEXN3abc2ns3fooEI4id_04id_1sp4id_2EEN3xyz3barE'}, key='using') + check('function', 'abc::ns::foo{id_0, id_1, id_2} void xyz::bar()', + {2: 'I000EXN3abc2ns3fooEI4id_04id_14id_2EEN3xyz3barEv', + 4: 'I000EXN3abc2ns3fooEI4id_04id_14id_2EEN3xyz3barEvv'}) + check('function', 'abc::ns::foo{id_0, id_1, ...id_2} void xyz::bar()', + {2: 'I00DpEXN3abc2ns3fooEI4id_04id_1sp4id_2EEN3xyz3barEv', + 4: 'I00DpEXN3abc2ns3fooEI4id_04id_1sp4id_2EEN3xyz3barEvv'}) + check('member', 'abc::ns::foo{id_0, id_1, id_2} ghi::qux xyz::bar', + {2: 
'I000EXN3abc2ns3fooEI4id_04id_14id_2EEN3xyz3barE'}) + check('member', 'abc::ns::foo{id_0, id_1, ...id_2} ghi::qux xyz::bar', + {2: 'I00DpEXN3abc2ns3fooEI4id_04id_1sp4id_2EEN3xyz3barE'}) + check('concept', 'Iterator{{T, U}} {key}Another', {2: 'I00EX8IteratorI1T1UEE7Another'}) + check('concept', 'template<typename ...Pack> {key}Numerics = (... && Numeric<Pack>)', + {2: 'IDpE8Numerics'}) + + # explicit specializations of members + check('member', 'template<> int A<int>::a', {2: 'IEN1AIiE1aE'}) + check('member', 'template int A<int>::a', {2: 'IEN1AIiE1aE'}, + output='template<> int A<int>::a') # same as above + check('member', 'template<> template<> int A<int>::B<int>::b', {2: 'IEIEN1AIiE1BIiE1bE'}) + check('member', 'template int A<int>::B<int>::b', {2: 'IEIEN1AIiE1BIiE1bE'}, + output='template<> template<> int A<int>::B<int>::b') # same as above + + # defaulted constrained type parameters + check('type', 'template<C T = int&> {key}A', {2: 'I_1CE1A'}, key='using') + + # pack expansion after non-type template parameter + check('type', 'template<int (X::*)(bool)...> {key}A', {2: 'I_DpM1XFibEE1A'}, key='using') + + +def test_domain_cpp_ast_placeholder_types(): + check('function', 'void f(Sortable auto &v)', {1: 'f__SortableR', 2: '1fR8Sortable'}) + check('function', 'void f(const Sortable auto &v)', {1: 'f__SortableCR', 2: '1fRK8Sortable'}) + check('function', 'void f(Sortable decltype(auto) &v)', {1: 'f__SortableR', 2: '1fR8Sortable'}) + check('function', 'void f(const Sortable decltype(auto) &v)', {1: 'f__SortableCR', 2: '1fRK8Sortable'}) + check('function', 'void f(Sortable decltype ( auto ) &v)', {1: 'f__SortableR', 2: '1fR8Sortable'}, + output='void f(Sortable decltype(auto) &v)') + + +def test_domain_cpp_ast_requires_clauses(): + check('function', 'template<typename T> requires A auto f() -> void requires B', + {4: 'I0EIQaa1A1BE1fvv'}) + check('function', 'template<typename T> requires A || B or C void f()', + {4: 'I0EIQoo1Aoo1B1CE1fvv'}) + check('function', 'void f() requires A || B || C', + {4: 'IQoo1Aoo1B1CE1fv'}) + check('function', 'Foo() requires A || B || C', + {4: 'IQoo1Aoo1B1CE3Foov'}) + check('function', 'template<typename T> requires A && B || C and D void f()', + {4: 'I0EIQooaa1A1Baa1C1DE1fvv'}) + check('function', + 'template<typename T> requires R<T> ' + + 'template<typename U> requires S<T> ' + + 'void A<T>::f() requires B', + {4: 'I0EIQ1RI1TEEI0EIQaa1SI1TE1BEN1A1fEvv'}) + check('function', + 'template<template<typename T> requires R<T> typename X> ' + + 'void f()', + {2: 'II0EIQ1RI1TEE0E1fv', 4: 'II0EIQ1RI1TEE0E1fvv'}) + check('type', + 'template<typename T> requires IsValid<T> {key}T = true_type', + {4: 'I0EIQ7IsValidI1TEE1T'}, key='using') + check('class', + 'template<typename T> requires IsValid<T> {key}T : Base', + {4: 'I0EIQ7IsValidI1TEE1T'}, key='class') + check('union', + 'template<typename T> requires IsValid<T> {key}T', + {4: 'I0EIQ7IsValidI1TEE1T'}, key='union') + check('member', + 'template<typename T> requires IsValid<T> int Val = 7', + {4: 'I0EIQ7IsValidI1TEE3Val'}) + + +def test_domain_cpp_ast_template_args(): + # from breathe#218 + check('function', + "template<typename F> " + "void allow(F *f, typename func<F, B, G != 1>::type tt)", + {2: "I0E5allowP1FN4funcI1F1BXG != 1EE4typeE", + 3: "I0E5allowP1FN4funcI1F1BXne1GL1EEE4typeE", + 4: "I0E5allowvP1FN4funcI1F1BXne1GL1EEE4typeE"}) + # from #3542 + check('type', "template<typename T> {key}" + "enable_if_not_array_t = std::enable_if_t<!is_array<T>::value, int>", + {2: "I0E21enable_if_not_array_t"}, + key='using') + 
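The template and template-argument checks above all funnel through the check()/parse() helpers defined at the top of this file. As a rough, standalone sketch of the same parsing path (using only the DefinitionParser calls those helpers already make; the declaration string and the demo_parse name are illustrative and not part of the suite):

from sphinx.domains.cpp import DefinitionParser


class Config:
    # minimal config stub, mirroring the one built inside parse() above
    cpp_id_attributes = ["id_attr"]
    cpp_paren_attributes = ["paren_attr"]


def demo_parse(decl: str) -> None:
    # parse a single C++ declaration the way check()/parse() do
    parser = DefinitionParser(decl, location=None, config=Config())
    ast = parser.parse_declaration("function", "function")
    parser.assert_end()
    print(str(ast))               # the declaration as Sphinx re-renders it
    print(ast.get_id(version=2))  # mangled id, including the version prefix
                                  # that _check() strips via _id_prefix


if __name__ == "__main__":
    demo_parse("void f(int i)")

The printed id is what the idDict values passed to check() are compared against once that prefix is removed.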
+ +def test_domain_cpp_ast_initializers(): + idsMember = {1: 'v__T', 2: '1v'} + idsFunction = {1: 'f__T', 2: '1f1T'} + idsTemplate = {2: 'I_1TE1fv', 4: 'I_1TE1fvv'} + # no init + check('member', 'T v', idsMember) + check('function', 'void f(T v)', idsFunction) + check('function', 'template<T v> void f()', idsTemplate) + # with '=', assignment-expression + check('member', 'T v = 42', idsMember) + check('function', 'void f(T v = 42)', idsFunction) + check('function', 'template<T v = 42> void f()', idsTemplate) + # with '=', braced-init + check('member', 'T v = {}', idsMember) + check('function', 'void f(T v = {})', idsFunction) + check('function', 'template<T v = {}> void f()', idsTemplate) + check('member', 'T v = {42, 42, 42}', idsMember) + check('function', 'void f(T v = {42, 42, 42})', idsFunction) + check('function', 'template<T v = {42, 42, 42}> void f()', idsTemplate) + check('member', 'T v = {42, 42, 42,}', idsMember) + check('function', 'void f(T v = {42, 42, 42,})', idsFunction) + check('function', 'template<T v = {42, 42, 42,}> void f()', idsTemplate) + check('member', 'T v = {42, 42, args...}', idsMember) + check('function', 'void f(T v = {42, 42, args...})', idsFunction) + check('function', 'template<T v = {42, 42, args...}> void f()', idsTemplate) + # without '=', braced-init + check('member', 'T v{}', idsMember) + check('member', 'T v{42, 42, 42}', idsMember) + check('member', 'T v{42, 42, 42,}', idsMember) + check('member', 'T v{42, 42, args...}', idsMember) + # other + check('member', 'T v = T{}', idsMember) + + +def test_domain_cpp_ast_attributes(): + # style: C++ + check('member', '[[]] int f', {1: 'f__i', 2: '1f'}) + check('member', '[ [ ] ] int f', {1: 'f__i', 2: '1f'}, + # this will fail when the proper grammar is implemented + output='[[ ]] int f') + check('member', '[[a]] int f', {1: 'f__i', 2: '1f'}) + # style: GNU + check('member', '__attribute__(()) int f', {1: 'f__i', 2: '1f'}) + check('member', '__attribute__((a)) int f', {1: 'f__i', 2: '1f'}) + check('member', '__attribute__((a, b)) int f', {1: 'f__i', 2: '1f'}) + check('member', '__attribute__((optimize(3))) int f', {1: 'f__i', 2: '1f'}) + check('member', '__attribute__((format(printf, 1, 2))) int f', {1: 'f__i', 2: '1f'}) + # style: user-defined id + check('member', 'id_attr int f', {1: 'f__i', 2: '1f'}) + # style: user-defined paren + check('member', 'paren_attr() int f', {1: 'f__i', 2: '1f'}) + check('member', 'paren_attr(a) int f', {1: 'f__i', 2: '1f'}) + check('member', 'paren_attr("") int f', {1: 'f__i', 2: '1f'}) + check('member', 'paren_attr(()[{}][]{}) int f', {1: 'f__i', 2: '1f'}) + with pytest.raises(DefinitionError): + parse('member', 'paren_attr(() int f') + with pytest.raises(DefinitionError): + parse('member', 'paren_attr([) int f') + with pytest.raises(DefinitionError): + parse('member', 'paren_attr({) int f') + with pytest.raises(DefinitionError): + parse('member', 'paren_attr([)]) int f') + with pytest.raises(DefinitionError): + parse('member', 'paren_attr((])) int f') + with pytest.raises(DefinitionError): + parse('member', 'paren_attr({]}) int f') + + # position: decl specs + check('function', 'static inline __attribute__(()) void f()', + {1: 'f', 2: '1fv'}, + output='__attribute__(()) static inline void f()') + check('function', '[[attr1]] [[attr2]] void f()', {1: 'f', 2: '1fv'}) + # position: declarator + check('member', 'int *[[attr1]] [[attr2]] i', {1: 'i__iP', 2: '1i'}) + check('member', 'int *const [[attr1]] [[attr2]] volatile i', {1: 'i__iPVC', 2: '1i'}, + output='int *[[attr1]] 
[[attr2]] volatile const i') + check('member', 'int &[[attr1]] [[attr2]] i', {1: 'i__iR', 2: '1i'}) + check('member', 'int *[[attr1]] [[attr2]] *i', {1: 'i__iPP', 2: '1i'}) + # position: parameters and qualifiers + check('function', 'void f() [[attr1]] [[attr2]]', {1: 'f', 2: '1fv'}) + + # position: class, union, enum + check('class', '{key}[[attr1]] [[attr2]] Foo', {1: 'Foo', 2: '3Foo'}, key='class') + check('union', '{key}[[attr1]] [[attr2]] Foo', {2: '3Foo'}, key='union') + check('enum', '{key}[[attr1]] [[attr2]] Foo', {2: '3Foo'}, key='enum') + # position: enumerator + check('enumerator', '{key}Foo [[attr1]] [[attr2]]', {2: '3Foo'}) + check('enumerator', '{key}Foo [[attr1]] [[attr2]] = 42', {2: '3Foo'}) + + +def test_domain_cpp_ast_xref_parsing(): + def check(target): + class Config: + cpp_id_attributes = ["id_attr"] + cpp_paren_attributes = ["paren_attr"] + parser = DefinitionParser(target, location=None, + config=Config()) + ast, isShorthand = parser.parse_xref_object() + parser.assert_end() + check('f') + check('f()') + check('void f()') + check('T f()') + + +@pytest.mark.parametrize( + ("param", "is_pack"), + [('typename', False), + ('typename T', False), + ('typename...', True), + ('typename... T', True), + ('int', False), + ('int N', False), + ('int* N', False), + ('int& N', False), + ('int&... N', True), + ('int*... N', True), + ('int...', True), + ('int... N', True), + ('auto', False), + ('auto...', True), + ('int X::*', False), + ('int X::*...', True), + ('int (X::*)(bool)', False), + ('int (X::*x)(bool)', False), + ('int (X::*)(bool)...', True), + ('template<typename> class', False), + ('template<typename> class...', True), + ]) +def test_domain_cpp_template_parameters_is_pack(param: str, is_pack: bool): + def parse_template_parameter(param: str): + ast = parse('type', 'template<' + param + '> X') + return ast.templatePrefix.templates[0].params[0] + ast = parse_template_parameter(param) + assert ast.isPack == is_pack + + +# def test_print(): +# # used for getting all the ids out for checking +# for a in ids: +# print(a) +# raise DefinitionError + + +def filter_warnings(warning, file): + lines = warning.getvalue().split("\n") + res = [l for l in lines if "domain-cpp" in l and f"{file}.rst" in l and + "WARNING: document isn't included in any toctree" not in l] + print(f"Filtered warnings for file '{file}':") + for w in res: + print(w) + return res + + +@pytest.mark.sphinx(testroot='domain-cpp', confoverrides={'nitpicky': True}) +def test_domain_cpp_build_multi_decl_lookup(app, status, warning): + app.builder.build_all() + ws = filter_warnings(warning, "lookup-key-overload") + assert len(ws) == 0 + + ws = filter_warnings(warning, "multi-decl-lookup") + assert len(ws) == 0 + + +@pytest.mark.sphinx(testroot='domain-cpp', confoverrides={'nitpicky': True}) +def test_domain_cpp_build_warn_template_param_qualified_name(app, status, warning): + app.builder.build_all() + ws = filter_warnings(warning, "warn-template-param-qualified-name") + assert len(ws) == 2 + assert "WARNING: cpp:type reference target not found: T::typeWarn" in ws[0] + assert "WARNING: cpp:type reference target not found: T::U::typeWarn" in ws[1] + + +@pytest.mark.sphinx(testroot='domain-cpp', confoverrides={'nitpicky': True}) +def test_domain_cpp_build_backslash_ok_true(app, status, warning): + app.builder.build_all() + ws = filter_warnings(warning, "backslash") + assert len(ws) == 0 + + +@pytest.mark.sphinx(testroot='domain-cpp', confoverrides={'nitpicky': True}) +def test_domain_cpp_build_semicolon(app, status, 
warning): + app.builder.build_all() + ws = filter_warnings(warning, "semicolon") + assert len(ws) == 0 + + +@pytest.mark.sphinx(testroot='domain-cpp', + confoverrides={'nitpicky': True, 'strip_signature_backslash': True}) +def test_domain_cpp_build_backslash_ok_false(app, status, warning): + app.builder.build_all() + ws = filter_warnings(warning, "backslash") + assert len(ws) == 1 + assert "WARNING: Parsing of expression failed. Using fallback parser." in ws[0] + + +@pytest.mark.sphinx(testroot='domain-cpp', confoverrides={'nitpicky': True}) +def test_domain_cpp_build_anon_dup_decl(app, status, warning): + app.builder.build_all() + ws = filter_warnings(warning, "anon-dup-decl") + assert len(ws) == 2 + assert "WARNING: cpp:identifier reference target not found: @a" in ws[0] + assert "WARNING: cpp:identifier reference target not found: @b" in ws[1] + + +@pytest.mark.sphinx(testroot='domain-cpp') +def test_domain_cpp_build_misuse_of_roles(app, status, warning): + app.builder.build_all() + ws = filter_warnings(warning, "roles-targets-ok") + assert len(ws) == 0 + + ws = filter_warnings(warning, "roles-targets-warn") + # the roles that should be able to generate warnings: + allRoles = ['class', 'struct', 'union', 'func', 'member', 'var', 'type', 'concept', 'enum', 'enumerator'] + ok = [ # targetType, okRoles + ('class', ['class', 'struct', 'type']), + ('union', ['union', 'type']), + ('func', ['func', 'type']), + ('member', ['member', 'var']), + ('type', ['type']), + ('concept', ['concept']), + ('enum', ['type', 'enum']), + ('enumerator', ['enumerator']), + ('functionParam', ['member', 'var']), + ('templateParam', ['class', 'struct', 'union', 'member', 'var', 'type']), + ] + warn = [] + for targetType, roles in ok: + txtTargetType = "function" if targetType == "func" else targetType + for r in allRoles: + if r not in roles: + warn.append(f"WARNING: cpp:{r} targets a {txtTargetType} (") + if targetType == 'templateParam': + warn.append(f"WARNING: cpp:{r} targets a {txtTargetType} (") + warn.append(f"WARNING: cpp:{r} targets a {txtTargetType} (") + warn = sorted(warn) + for w in ws: + assert "targets a" in w + ws = [w[w.index("WARNING:"):] for w in ws] + ws = sorted(ws) + print("Expected warnings:") + for w in warn: + print(w) + print("Actual warnings:") + for w in ws: + print(w) + + for i in range(min(len(warn), len(ws))): + assert ws[i].startswith(warn[i]) + + assert len(ws) == len(warn) + + +@pytest.mark.sphinx(testroot='domain-cpp', confoverrides={'add_function_parentheses': True}) +def test_domain_cpp_build_with_add_function_parentheses_is_True(app, status, warning): + app.builder.build_all() + + def check(spec, text, file): + pattern = '<li><p>%s<a .*?><code .*?><span .*?>%s</span></code></a></p></li>' % spec + res = re.search(pattern, text) + if not res: + print(f"Pattern\n\t{pattern}\nnot found in {file}") + raise AssertionError + rolePatterns = [ + ('', 'Sphinx'), + ('', 'Sphinx::version'), + ('', 'version'), + ('', 'List'), + ('', 'MyEnum'), + ] + parenPatterns = [ + ('ref function without parens ', r'paren_1\(\)'), + ('ref function with parens ', r'paren_2\(\)'), + ('ref function without parens, explicit title ', 'paren_3_title'), + ('ref function with parens, explicit title ', 'paren_4_title'), + ('ref op call without parens ', r'paren_5::operator\(\)\(\)'), + ('ref op call with parens ', r'paren_6::operator\(\)\(\)'), + ('ref op call without parens, explicit title ', 'paren_7_title'), + ('ref op call with parens, explicit title ', 'paren_8_title'), + ] + + f = 'roles.html' + t = 
(app.outdir / f).read_text(encoding='utf8') + for s in rolePatterns: + check(s, t, f) + for s in parenPatterns: + check(s, t, f) + + f = 'any-role.html' + t = (app.outdir / f).read_text(encoding='utf8') + for s in parenPatterns: + check(s, t, f) + + +@pytest.mark.sphinx(testroot='domain-cpp', confoverrides={'add_function_parentheses': False}) +def test_domain_cpp_build_with_add_function_parentheses_is_False(app, status, warning): + app.builder.build_all() + + def check(spec, text, file): + pattern = '<li><p>%s<a .*?><code .*?><span .*?>%s</span></code></a></p></li>' % spec + res = re.search(pattern, text) + if not res: + print(f"Pattern\n\t{pattern}\nnot found in {file}") + raise AssertionError + rolePatterns = [ + ('', 'Sphinx'), + ('', 'Sphinx::version'), + ('', 'version'), + ('', 'List'), + ('', 'MyEnum'), + ] + parenPatterns = [ + ('ref function without parens ', 'paren_1'), + ('ref function with parens ', 'paren_2'), + ('ref function without parens, explicit title ', 'paren_3_title'), + ('ref function with parens, explicit title ', 'paren_4_title'), + ('ref op call without parens ', r'paren_5::operator\(\)'), + ('ref op call with parens ', r'paren_6::operator\(\)'), + ('ref op call without parens, explicit title ', 'paren_7_title'), + ('ref op call with parens, explicit title ', 'paren_8_title'), + ] + + f = 'roles.html' + t = (app.outdir / f).read_text(encoding='utf8') + for s in rolePatterns: + check(s, t, f) + for s in parenPatterns: + check(s, t, f) + + f = 'any-role.html' + t = (app.outdir / f).read_text(encoding='utf8') + for s in parenPatterns: + check(s, t, f) + + +@pytest.mark.sphinx(testroot='domain-cpp') +def test_domain_cpp_build_xref_consistency(app, status, warning): + app.builder.build_all() + + test = 'xref_consistency.html' + output = (app.outdir / test).read_text(encoding='utf8') + + def classes(role, tag): + pattern = (fr'{role}-role:.*?' + fr'<(?P<tag>{tag}) .*?class=["\'](?P<classes>.*?)["\'].*?>' + r'.*' + r'</(?P=tag)>') + result = re.search(pattern, output) + expect = f'''\ +Pattern for role `{role}` with tag `{tag}` +\t{pattern} +not found in `{test}` +''' + assert result, expect + return set(result.group('classes').split()) + + class RoleClasses: + """Collect the classes from the layout that was generated for a given role.""" + + def __init__(self, role, root, contents): + self.name = role + self.classes = classes(role, root) + self.content_classes = {} + for tag in contents: + self.content_classes[tag] = classes(role, tag) + + # not actually used as a reference point + # code_role = RoleClasses('code', 'code', []) + any_role = RoleClasses('any', 'a', ['code']) + cpp_any_role = RoleClasses('cpp-any', 'a', ['code']) + # NYI: consistent looks + # texpr_role = RoleClasses('cpp-texpr', 'span', ['a', 'code']) + expr_role = RoleClasses('cpp-expr', 'span', ['a']) + texpr_role = RoleClasses('cpp-texpr', 'span', ['a', 'span']) + + # XRefRole-style classes + + # any and cpp:any do not put these classes at the root + + # n.b. 
the generic any machinery finds the specific 'cpp-class' object type + expect = 'any uses XRefRole classes' + assert {'xref', 'any', 'cpp', 'cpp-class'} <= any_role.content_classes['code'], expect + + expect = 'cpp:any uses XRefRole classes' + assert {'xref', 'cpp-any', 'cpp'} <= cpp_any_role.content_classes['code'], expect + + for role in (expr_role, texpr_role): + name = role.name + expect = f'`{name}` puts the domain and role classes at its root' + assert {'sig', 'sig-inline', 'cpp', name} <= role.classes, expect + + # reference classes + + expect = 'the xref roles use the same reference classes' + assert any_role.classes == cpp_any_role.classes, expect + assert any_role.classes == expr_role.content_classes['a'], expect + assert any_role.classes == texpr_role.content_classes['a'], expect + + +@pytest.mark.sphinx(testroot='domain-cpp', confoverrides={'nitpicky': True}) +def test_domain_cpp_build_field_role(app, status, warning): + app.builder.build_all() + ws = filter_warnings(warning, "field-role") + assert len(ws) == 0 + + +@pytest.mark.sphinx(testroot='domain-cpp-intersphinx', confoverrides={'nitpicky': True}) +def test_domain_cpp_build_intersphinx(tmp_path, app, status, warning): + origSource = """\ +.. cpp:class:: _class +.. cpp:struct:: _struct +.. cpp:union:: _union +.. cpp:function:: void _function() +.. cpp:member:: int _member +.. cpp:var:: int _var +.. cpp:type:: _type +.. cpp:concept:: template<typename T> _concept +.. cpp:enum:: _enum + + .. cpp:enumerator:: _enumerator + +.. cpp:enum-struct:: _enumStruct + + .. cpp:enumerator:: _scopedEnumerator + +.. cpp:enum-class:: _enumClass +.. cpp:function:: void _functionParam(int param) +.. cpp:function:: template<typename TParam> void _templateParam() +""" # noqa: F841 + inv_file = tmp_path / 'inventory' + inv_file.write_bytes(b'''\ +# Sphinx inventory version 2 +# Project: C Intersphinx Test +# Version: +# The remainder of this file is compressed using zlib. +''' + zlib.compress(b'''\ +_class cpp:class 1 index.html#_CPPv46$ - +_concept cpp:concept 1 index.html#_CPPv4I0E8$ - +_concept::T cpp:templateParam 1 index.html#_CPPv4I0E8_concept - +_enum cpp:enum 1 index.html#_CPPv45$ - +_enum::_enumerator cpp:enumerator 1 index.html#_CPPv4N5_enum11_enumeratorE - +_enumClass cpp:enum 1 index.html#_CPPv410$ - +_enumStruct cpp:enum 1 index.html#_CPPv411$ - +_enumStruct::_scopedEnumerator cpp:enumerator 1 index.html#_CPPv4N11_enumStruct17_scopedEnumeratorE - +_enumerator cpp:enumerator 1 index.html#_CPPv4N5_enum11_enumeratorE - +_function cpp:function 1 index.html#_CPPv49_functionv - +_functionParam cpp:function 1 index.html#_CPPv414_functionParami - +_functionParam::param cpp:functionParam 1 index.html#_CPPv414_functionParami - +_member cpp:member 1 index.html#_CPPv47$ - +_struct cpp:class 1 index.html#_CPPv47$ - +_templateParam cpp:function 1 index.html#_CPPv4I0E14_templateParamvv - +_templateParam::TParam cpp:templateParam 1 index.html#_CPPv4I0E14_templateParamvv - +_type cpp:type 1 index.html#_CPPv45$ - +_union cpp:union 1 index.html#_CPPv46$ - +_var cpp:member 1 index.html#_CPPv44$ - +''')) # noqa: W291 + app.config.intersphinx_mapping = { + 'https://localhost/intersphinx/cpp/': str(inv_file), + } + app.config.intersphinx_cache_limit = 0 + # load the inventory and check if it's done correctly + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + + app.builder.build_all() + ws = filter_warnings(warning, "index") + assert len(ws) == 0 + + +def test_domain_cpp_parse_no_index_entry(app): + text = (".. 
cpp:function:: void f()\n" + ".. cpp:function:: void g()\n" + " :no-index-entry:\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, desc, addnodes.index, desc)) + assert_node(doctree[0], addnodes.index, entries=[('single', 'f (C++ function)', '_CPPv41fv', '', None)]) + assert_node(doctree[2], addnodes.index, entries=[]) + + +def test_domain_cpp_parse_mix_decl_duplicate(app, warning): + # Issue 8270 + text = (".. cpp:struct:: A\n" + ".. cpp:function:: void A()\n" + ".. cpp:struct:: A\n") + restructuredtext.parse(app, text) + ws = warning.getvalue().split("\n") + assert len(ws) == 5 + assert "index.rst:2: WARNING: Duplicate C++ declaration, also defined at index:1." in ws[0] + assert "Declaration is '.. cpp:function:: void A()'." in ws[1] + assert "index.rst:3: WARNING: Duplicate C++ declaration, also defined at index:1." in ws[2] + assert "Declaration is '.. cpp:struct:: A'." in ws[3] + assert ws[4] == "" + + +# For some reason, using the default testroot of "root" leads to the contents of +# `test-root/objects.txt` polluting the symbol table depending on the test +# execution order. Using a testroot of "config" seems to avoid that problem. +@pytest.mark.sphinx(testroot='config') +def test_domain_cpp_normalize_unspecialized_template_args(make_app, app_params): + args, kwargs = app_params + + text1 = (".. cpp:class:: template <typename T> A\n") + text2 = (".. cpp:class:: template <typename T> template <typename U> A<T>::B\n") + + app1 = make_app(*args, **kwargs) + restructuredtext.parse(app=app1, text=text1, docname='text1') + root1 = app1.env.domaindata['cpp']['root_symbol'] + + assert root1.dump(1) == ( + ' ::\n' + ' template<typename T> \n' + ' A: {class} template<typename T> A\t(text1)\n' + ' T: {templateParam} typename T\t(text1)\n' + ) + + app2 = make_app(*args, **kwargs) + restructuredtext.parse(app=app2, text=text2, docname='text2') + root2 = app2.env.domaindata['cpp']['root_symbol'] + + assert root2.dump(1) == ( + ' ::\n' + ' template<typename T> \n' + ' A\n' + ' T\n' + ' template<typename U> \n' + ' B: {class} template<typename T> template<typename U> A<T>::B\t(text2)\n' + ' U: {templateParam} typename U\t(text2)\n' + ) + + root2.merge_with(root1, ['text1'], app2.env) + + assert root2.dump(1) == ( + ' ::\n' + ' template<typename T> \n' + ' A: {class} template<typename T> A\t(text1)\n' + ' T: {templateParam} typename T\t(text1)\n' + ' template<typename U> \n' + ' B: {class} template<typename T> template<typename U> A<T>::B\t(text2)\n' + ' U: {templateParam} typename U\t(text2)\n' + ) + warning = app2._warning.getvalue() + assert 'Internal C++ domain error during symbol merging' not in warning + + +@pytest.mark.sphinx('html', confoverrides={ + 'cpp_maximum_signature_line_length': len('str hello(str name)'), +}) +def test_cpp_function_signature_with_cpp_maximum_signature_line_length_equal(app): + text = '.. 
cpp:function:: str hello(str name)' + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_signature_line, ( + pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, 'hello']], + desc_parameterlist, + )], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype='function', + domain='cpp', objtype='function', no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, 'str']], + desc_sig_space, + [desc_sig_name, 'name'], + )]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'cpp_maximum_signature_line_length': len('str hello(str name)'), +}) +def test_cpp_function_signature_with_cpp_maximum_signature_line_length_force_single(app): + text = ('.. cpp:function:: str hello(str names)\n' + ' :single-line-parameter-list:') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_signature_line, ( + pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, 'hello']], + desc_parameterlist, + )], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype='function', + domain='cpp', objtype='function', no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, 'str']], + desc_sig_space, + [desc_sig_name, 'names']), + ]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'cpp_maximum_signature_line_length': len("str hello(str name)"), +}) +def test_cpp_function_signature_with_cpp_maximum_signature_line_length_break(app): + text = '.. cpp:function:: str hello(str names)' + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_signature_line, ( + pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, 'hello']], + desc_parameterlist, + )], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype='function', + domain='cpp', objtype='function', no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, 'str']], + desc_sig_space, + [desc_sig_name, 'names']), + ]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=True) + + +@pytest.mark.sphinx('html', confoverrides={ + 'maximum_signature_line_length': len('str hello(str name)'), +}) +def test_cpp_function_signature_with_maximum_signature_line_length_equal(app): + text = '.. 
cpp:function:: str hello(str name)' + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_signature_line, ( + pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, 'hello']], + desc_parameterlist, + )], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype='function', + domain='cpp', objtype='function', no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, 'str']], + desc_sig_space, + [desc_sig_name, 'name'], + )]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'maximum_signature_line_length': len('str hello(str name)'), +}) +def test_cpp_function_signature_with_maximum_signature_line_length_force_single(app): + text = ('.. cpp:function:: str hello(str names)\n' + ' :single-line-parameter-list:') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_signature_line, ( + pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, 'hello']], + desc_parameterlist, + )], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype='function', + domain='cpp', objtype='function', no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, 'str']], + desc_sig_space, + [desc_sig_name, 'names']), + ]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'maximum_signature_line_length': len("str hello(str name)"), +}) +def test_cpp_function_signature_with_maximum_signature_line_length_break(app): + text = '.. cpp:function:: str hello(str names)' + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_signature_line, ( + pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, 'hello']], + desc_parameterlist, + )], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype='function', + domain='cpp', objtype='function', no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, 'str']], + desc_sig_space, + [desc_sig_name, 'names']), + ]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=True) + + +@pytest.mark.sphinx('html', confoverrides={ + 'cpp_maximum_signature_line_length': len('str hello(str name)'), + 'maximum_signature_line_length': 1, +}) +def test_cpp_maximum_signature_line_length_overrides_global(app): + text = '.. 
cpp:function:: str hello(str name)' + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ([desc_signature, ([desc_signature_line, (pending_xref, + desc_sig_space, + [desc_name, [desc_sig_name, "hello"]], + desc_parameterlist)])], + desc_content)], + )) + assert_node(doctree[1], addnodes.desc, desctype='function', + domain='cpp', objtype='function', no_index=False) + assert_node(doctree[1][0][0][3], [desc_parameterlist, desc_parameter, ( + [pending_xref, [desc_sig_name, 'str']], + desc_sig_space, + [desc_sig_name, 'name'], + )]) + assert_node(doctree[1][0][0][3], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', testroot='domain-cpp-cpp_maximum_signature_line_length') +def test_domain_cpp_cpp_maximum_signature_line_length_in_html(app, status, warning): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf-8') + expected = """\ + +<dl> +<dd>\ +<span class="n"><span class="pre">str</span></span>\ +<span class="w"> </span>\ +<span class="n sig-param"><span class="pre">name</span></span>,\ +</dd> +</dl> + +<span class="sig-paren">)</span>\ +<a class="headerlink" href=\ +""" + assert expected in content + + +@pytest.mark.sphinx( + 'text', testroot='domain-cpp-cpp_maximum_signature_line_length', +) +def test_domain_cpp_cpp_maximum_signature_line_length_in_text(app, status, warning): + app.build() + content = (app.outdir / 'index.txt').read_text(encoding='utf8') + param_line_fmt = STDINDENT * " " + "{}\n" + + expected_parameter_list_hello = "(\n{})".format(param_line_fmt.format("str name,")) + + assert expected_parameter_list_hello in content diff --git a/tests/test_domain_js.py b/tests/test_domain_js.py new file mode 100644 index 0000000..bf4c3fe --- /dev/null +++ b/tests/test_domain_js.py @@ -0,0 +1,505 @@ +"""Tests the JavaScript Domain""" + +from unittest.mock import Mock + +import docutils.utils +import pytest +from docutils import nodes + +from sphinx import addnodes +from sphinx.addnodes import ( + desc, + desc_annotation, + desc_content, + desc_name, + desc_parameter, + desc_parameterlist, + desc_sig_keyword, + desc_sig_name, + desc_sig_space, + desc_signature, +) +from sphinx.domains.javascript import JavaScriptDomain +from sphinx.testing import restructuredtext +from sphinx.testing.util import assert_node +from sphinx.writers.text import STDINDENT + + +@pytest.mark.sphinx('dummy', testroot='domain-js') +def test_domain_js_xrefs(app, status, warning): + """Domain objects have correct prefixes when looking up xrefs""" + app.builder.build_all() + + def assert_refnode(node, mod_name, prefix, target, reftype=None, + domain='js'): + attributes = { + 'refdomain': domain, + 'reftarget': target, + } + if reftype is not None: + attributes['reftype'] = reftype + if mod_name is not False: + attributes['js:module'] = mod_name + if prefix is not False: + attributes['js:object'] = prefix + assert_node(node, **attributes) + + doctree = app.env.get_doctree('roles') + refnodes = list(doctree.findall(addnodes.pending_xref)) + assert_refnode(refnodes[0], None, None, 'TopLevel', 'class') + assert_refnode(refnodes[1], None, None, 'top_level', 'func') + assert_refnode(refnodes[2], None, 'NestedParentA', 'child_1', 'func') + assert_refnode(refnodes[3], None, 'NestedParentA', 'NestedChildA.subchild_2', 'func') + assert_refnode(refnodes[4], None, 'NestedParentA', 'child_2', 'func') + assert_refnode(refnodes[5], False, 'NestedParentA', 'any_child', domain='') + assert_refnode(refnodes[6], None, 'NestedParentA', 
'NestedChildA', 'class') + assert_refnode(refnodes[7], None, 'NestedParentA.NestedChildA', 'subchild_2', 'func') + assert_refnode(refnodes[8], None, 'NestedParentA.NestedChildA', + 'NestedParentA.child_1', 'func') + assert_refnode(refnodes[9], None, 'NestedParentA', 'NestedChildA.subchild_1', 'func') + assert_refnode(refnodes[10], None, 'NestedParentB', 'child_1', 'func') + assert_refnode(refnodes[11], None, 'NestedParentB', 'NestedParentB', 'class') + assert_refnode(refnodes[12], None, None, 'NestedParentA.NestedChildA', 'class') + assert len(refnodes) == 13 + + doctree = app.env.get_doctree('module') + refnodes = list(doctree.findall(addnodes.pending_xref)) + assert_refnode(refnodes[0], 'module_a.submodule', None, 'ModTopLevel', + 'class') + assert_refnode(refnodes[1], 'module_a.submodule', 'ModTopLevel', + 'mod_child_1', 'meth') + assert_refnode(refnodes[2], 'module_a.submodule', 'ModTopLevel', + 'ModTopLevel.mod_child_1', 'meth') + assert_refnode(refnodes[3], 'module_a.submodule', 'ModTopLevel', + 'mod_child_2', 'meth') + assert_refnode(refnodes[4], 'module_a.submodule', 'ModTopLevel', + 'module_a.submodule.ModTopLevel.mod_child_1', 'meth') + assert_refnode(refnodes[5], 'module_b.submodule', None, 'ModTopLevel', + 'class') + assert_refnode(refnodes[6], 'module_b.submodule', 'ModTopLevel', + 'module_a.submodule', 'mod') + assert len(refnodes) == 7 + + +@pytest.mark.sphinx('dummy', testroot='domain-js') +def test_domain_js_objects(app, status, warning): + app.builder.build_all() + + modules = app.env.domains['js'].data['modules'] + objects = app.env.domains['js'].data['objects'] + + assert 'module_a.submodule' in modules + assert 'module_a.submodule' in objects + assert 'module_b.submodule' in modules + assert 'module_b.submodule' in objects + + assert objects['module_a.submodule.ModTopLevel'][2] == 'class' + assert objects['module_a.submodule.ModTopLevel.mod_child_1'][2] == 'method' + assert objects['module_a.submodule.ModTopLevel.mod_child_2'][2] == 'method' + assert objects['module_b.submodule.ModTopLevel'][2] == 'class' + + assert objects['TopLevel'][2] == 'class' + assert objects['top_level'][2] == 'function' + assert objects['NestedParentA'][2] == 'class' + assert objects['NestedParentA.child_1'][2] == 'function' + assert objects['NestedParentA.any_child'][2] == 'function' + assert objects['NestedParentA.NestedChildA'][2] == 'class' + assert objects['NestedParentA.NestedChildA.subchild_1'][2] == 'function' + assert objects['NestedParentA.NestedChildA.subchild_2'][2] == 'function' + assert objects['NestedParentA.child_2'][2] == 'function' + assert objects['NestedParentB'][2] == 'class' + assert objects['NestedParentB.child_1'][2] == 'function' + + +@pytest.mark.sphinx('dummy', testroot='domain-js') +def test_domain_js_find_obj(app, status, warning): + + def find_obj(mod_name, prefix, obj_name, obj_type, searchmode=0): + return app.env.domains['js'].find_obj( + app.env, mod_name, prefix, obj_name, obj_type, searchmode) + + app.builder.build_all() + + assert (find_obj(None, None, 'NONEXISTANT', 'class') == (None, None)) + assert (find_obj(None, None, 'NestedParentA', 'class') == + ('NestedParentA', ('roles', 'NestedParentA', 'class'))) + assert (find_obj(None, None, 'NestedParentA.NestedChildA', 'class') == + ('NestedParentA.NestedChildA', + ('roles', 'NestedParentA.NestedChildA', 'class'))) + assert (find_obj(None, 'NestedParentA', 'NestedChildA', 'class') == + ('NestedParentA.NestedChildA', + ('roles', 'NestedParentA.NestedChildA', 'class'))) + assert (find_obj(None, None, 
'NestedParentA.NestedChildA.subchild_1', 'func') == + ('NestedParentA.NestedChildA.subchild_1', + ('roles', 'NestedParentA.NestedChildA.subchild_1', 'function'))) + assert (find_obj(None, 'NestedParentA', 'NestedChildA.subchild_1', 'func') == + ('NestedParentA.NestedChildA.subchild_1', + ('roles', 'NestedParentA.NestedChildA.subchild_1', 'function'))) + assert (find_obj(None, 'NestedParentA.NestedChildA', 'subchild_1', 'func') == + ('NestedParentA.NestedChildA.subchild_1', + ('roles', 'NestedParentA.NestedChildA.subchild_1', 'function'))) + assert (find_obj('module_a.submodule', 'ModTopLevel', 'mod_child_2', 'meth') == + ('module_a.submodule.ModTopLevel.mod_child_2', + ('module', 'module_a.submodule.ModTopLevel.mod_child_2', 'method'))) + assert (find_obj('module_b.submodule', 'ModTopLevel', 'module_a.submodule', 'mod') == + ('module_a.submodule', + ('module', 'module-module_a.submodule', 'module'))) + + +def test_get_full_qualified_name(): + env = Mock(domaindata={}) + domain = JavaScriptDomain(env) + + # non-js references + node = nodes.reference() + assert domain.get_full_qualified_name(node) is None + + # simple reference + node = nodes.reference(reftarget='func') + assert domain.get_full_qualified_name(node) == 'func' + + # with js:module context + kwargs = {'js:module': 'module1'} + node = nodes.reference(reftarget='func', **kwargs) + assert domain.get_full_qualified_name(node) == 'module1.func' + + # with js:object context + kwargs = {'js:object': 'Class'} + node = nodes.reference(reftarget='func', **kwargs) + assert domain.get_full_qualified_name(node) == 'Class.func' + + # with both js:module and js:object context + kwargs = {'js:module': 'module1', 'js:object': 'Class'} + node = nodes.reference(reftarget='func', **kwargs) + assert domain.get_full_qualified_name(node) == 'module1.Class.func' + + +def test_js_module(app): + text = ".. js:module:: sphinx" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + nodes.target)) + assert_node(doctree[0], addnodes.index, + entries=[("single", "sphinx (module)", "module-sphinx", "", None)]) + assert_node(doctree[1], nodes.target, ids=["module-sphinx"]) + + +def test_js_function(app): + text = ".. js:function:: sum(a, b)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, ([desc_sig_name, "sum"])], + desc_parameterlist)], + [desc_content, ()])])) + assert_node(doctree[1][0][1], [desc_parameterlist, ([desc_parameter, ([desc_sig_name, "a"])], + [desc_parameter, ([desc_sig_name, "b"])])]) + assert_node(doctree[0], addnodes.index, + entries=[("single", "sum() (built-in function)", "sum", "", None)]) + assert_node(doctree[1], addnodes.desc, domain="js", objtype="function", no_index=False) + + +def test_js_class(app): + text = ".. js:class:: Application" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_annotation, ([desc_sig_keyword, 'class'], + desc_sig_space)], + [desc_name, ([desc_sig_name, "Application"])], + [desc_parameterlist, ()])], + [desc_content, ()])])) + assert_node(doctree[0], addnodes.index, + entries=[("single", "Application() (class)", "Application", "", None)]) + assert_node(doctree[1], addnodes.desc, domain="js", objtype="class", no_index=False) + + +def test_js_data(app): + text = ".. 
js:data:: name" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, ([desc_sig_name, "name"])])], + [desc_content, ()])])) + assert_node(doctree[0], addnodes.index, + entries=[("single", "name (global variable or constant)", "name", "", None)]) + assert_node(doctree[1], addnodes.desc, domain="js", objtype="data", no_index=False) + + +def test_no_index_entry(app): + text = (".. js:function:: f()\n" + ".. js:function:: g()\n" + " :no-index-entry:\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, desc, addnodes.index, desc)) + assert_node(doctree[0], addnodes.index, entries=[('single', 'f() (built-in function)', 'f', '', None)]) + assert_node(doctree[2], addnodes.index, entries=[]) + + +def test_module_content_line_number(app): + text = (".. js:module:: foo\n" + + "\n" + + " Some link here: :ref:`abc`\n") + doc = restructuredtext.parse(app, text) + xrefs = list(doc.findall(condition=addnodes.pending_xref)) + assert len(xrefs) == 1 + source, line = docutils.utils.get_source_line(xrefs[0]) + assert 'index.rst' in source + assert line == 3 + + +@pytest.mark.sphinx('html', confoverrides={ + 'javascript_maximum_signature_line_length': len("hello(name)"), +}) +def test_jsfunction_signature_with_javascript_maximum_signature_line_length_equal(app): + text = ".. js:function:: hello(name)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, ([desc_sig_name, "hello"])], + desc_parameterlist, + )], + desc_content, + )], + )) + assert_node(doctree[1], desc, desctype="function", + domain="js", objtype="function", no_index=False) + assert_node(doctree[1][0][1], + [desc_parameterlist, desc_parameter, ([desc_sig_name, "name"])]) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'javascript_maximum_signature_line_length': len("hello(name)"), +}) +def test_jsfunction_signature_with_javascript_maximum_signature_line_length_force_single(app): + text = (".. js:function:: hello(names)\n" + " :single-line-parameter-list:") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, ([desc_sig_name, "hello"])], + desc_parameterlist, + )], + desc_content, + )], + )) + assert_node(doctree[1], desc, desctype="function", + domain="js", objtype="function", no_index=False) + assert_node(doctree[1][0][1], + [desc_parameterlist, desc_parameter, ([desc_sig_name, "names"])]) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'javascript_maximum_signature_line_length': len("hello(name)"), +}) +def test_jsfunction_signature_with_javascript_maximum_signature_line_length_break(app): + text = ".. 
js:function:: hello(names)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, ([desc_sig_name, "hello"])], + desc_parameterlist, + )], + desc_content, + )], + )) + assert_node(doctree[1], desc, desctype="function", + domain="js", objtype="function", no_index=False) + assert_node(doctree[1][0][1], + [desc_parameterlist, desc_parameter, ([desc_sig_name, "names"])]) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=True) + + +@pytest.mark.sphinx('html', confoverrides={ + 'maximum_signature_line_length': len("hello(name)"), +}) +def test_jsfunction_signature_with_maximum_signature_line_length_equal(app): + text = ".. js:function:: hello(name)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, ([desc_sig_name, "hello"])], + desc_parameterlist, + )], + desc_content, + )], + )) + assert_node(doctree[1], desc, desctype="function", + domain="js", objtype="function", no_index=False) + assert_node(doctree[1][0][1], + [desc_parameterlist, desc_parameter, ([desc_sig_name, "name"])]) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'maximum_signature_line_length': len("hello(name)"), +}) +def test_jsfunction_signature_with_maximum_signature_line_length_force_single(app): + text = (".. js:function:: hello(names)\n" + " :single-line-parameter-list:") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, ([desc_sig_name, "hello"])], + desc_parameterlist, + )], + desc_content, + )], + )) + assert_node(doctree[1], desc, desctype="function", + domain="js", objtype="function", no_index=False) + assert_node(doctree[1][0][1], + [desc_parameterlist, desc_parameter, ([desc_sig_name, "names"])]) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'maximum_signature_line_length': len("hello(name)"), +}) +def test_jsfunction_signature_with_maximum_signature_line_length_break(app): + text = ".. js:function:: hello(names)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, ([desc_sig_name, "hello"])], + desc_parameterlist, + )], + desc_content, + )], + )) + assert_node(doctree[1], desc, desctype="function", + domain="js", objtype="function", no_index=False) + assert_node(doctree[1][0][1], + [desc_parameterlist, desc_parameter, ([desc_sig_name, "names"])]) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=True) + + +@pytest.mark.sphinx( + 'html', + confoverrides={ + 'javascript_maximum_signature_line_length': len("hello(name)"), + 'maximum_signature_line_length': 1, + }, +) +def test_javascript_maximum_signature_line_length_overrides_global(app): + text = ".. 
js:function:: hello(name)" + doctree = restructuredtext.parse(app, text) + expected_doctree = (addnodes.index, + [desc, ([desc_signature, ([desc_name, ([desc_sig_name, "hello"])], + desc_parameterlist)], + desc_content)]) + assert_node(doctree, expected_doctree) + assert_node(doctree[1], desc, desctype="function", + domain="js", objtype="function", no_index=False) + expected_sig = [desc_parameterlist, desc_parameter, [desc_sig_name, "name"]] + assert_node(doctree[1][0][1], expected_sig) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx( + 'html', testroot='domain-js-javascript_maximum_signature_line_length', +) +def test_domain_js_javascript_maximum_signature_line_length_in_html(app, status, warning): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + expected_parameter_list_hello = """\ + +<dl> +<dd>\ +<em class="sig-param">\ +<span class="n"><span class="pre">name</span></span>\ +</em>,\ +</dd> +</dl> + +<span class="sig-paren">)</span>\ +<a class="headerlink" href="#hello" title="Link to this definition">¶</a>\ +</dt>\ +""" + assert expected_parameter_list_hello in content + + param_line_fmt = '<dd>{}</dd>\n' + param_name_fmt = ( + '<em class="sig-param"><span class="n"><span class="pre">{}</span></span></em>' + ) + optional_fmt = '<span class="optional">{}</span>' + + expected_a = param_line_fmt.format( + optional_fmt.format("[") + param_name_fmt.format("a") + "," + optional_fmt.format("["), + ) + assert expected_a in content + + expected_b = param_line_fmt.format( + param_name_fmt.format("b") + "," + optional_fmt.format("]") + optional_fmt.format("]"), + ) + assert expected_b in content + + expected_c = param_line_fmt.format(param_name_fmt.format("c") + ",") + assert expected_c in content + + expected_d = param_line_fmt.format(param_name_fmt.format("d") + optional_fmt.format("[") + ",") + assert expected_d in content + + expected_e = param_line_fmt.format(param_name_fmt.format("e") + ",") + assert expected_e in content + + expected_f = param_line_fmt.format(param_name_fmt.format("f") + "," + optional_fmt.format("]")) + assert expected_f in content + + expected_parameter_list_foo = """\ + +<dl> +{}{}{}{}{}{}</dl> + +<span class="sig-paren">)</span>\ +<a class="headerlink" href="#foo" title="Link to this definition">¶</a>\ +</dt>\ +""".format(expected_a, expected_b, expected_c, expected_d, expected_e, expected_f) + assert expected_parameter_list_foo in content + + +@pytest.mark.sphinx( + 'text', testroot='domain-js-javascript_maximum_signature_line_length', +) +def test_domain_js_javascript_maximum_signature_line_length_in_text(app, status, warning): + app.build() + content = (app.outdir / 'index.txt').read_text(encoding='utf8') + param_line_fmt = STDINDENT * " " + "{}\n" + + expected_parameter_list_hello = "(\n{})".format(param_line_fmt.format("name,")) + + assert expected_parameter_list_hello in content + + expected_a = param_line_fmt.format("[a,[") + assert expected_a in content + + expected_b = param_line_fmt.format("b,]]") + assert expected_b in content + + expected_c = param_line_fmt.format("c,") + assert expected_c in content + + expected_d = param_line_fmt.format("d[,") + assert expected_d in content + + expected_e = param_line_fmt.format("e,") + assert expected_e in content + + expected_f = param_line_fmt.format("f,]") + assert expected_f in content + + expected_parameter_list_foo = "(\n{}{}{}{}{}{})".format( + expected_a, expected_b, expected_c, expected_d, expected_e, expected_f, + ) + 
assert expected_parameter_list_foo in content diff --git a/tests/test_domain_py.py b/tests/test_domain_py.py new file mode 100644 index 0000000..c5a044b --- /dev/null +++ b/tests/test_domain_py.py @@ -0,0 +1,2123 @@ +"""Tests the Python Domain""" + +from __future__ import annotations + +import re +from unittest.mock import Mock + +import docutils.utils +import pytest +from docutils import nodes + +from sphinx import addnodes +from sphinx.addnodes import ( + desc, + desc_addname, + desc_annotation, + desc_content, + desc_name, + desc_optional, + desc_parameter, + desc_parameterlist, + desc_returns, + desc_sig_keyword, + desc_sig_literal_number, + desc_sig_literal_string, + desc_sig_name, + desc_sig_operator, + desc_sig_punctuation, + desc_sig_space, + desc_signature, + desc_type_parameter, + desc_type_parameter_list, + pending_xref, +) +from sphinx.domains import IndexEntry +from sphinx.domains.python import ( + PythonDomain, + PythonModuleIndex, + _parse_annotation, + _pseudo_parse_arglist, + py_sig_re, +) +from sphinx.testing import restructuredtext +from sphinx.testing.util import assert_node +from sphinx.writers.text import STDINDENT + + +def parse(sig): + m = py_sig_re.match(sig) + if m is None: + raise ValueError + name_prefix, tp_list, name, arglist, retann = m.groups() + signode = addnodes.desc_signature(sig, '') + _pseudo_parse_arglist(signode, arglist) + return signode.astext() + + +def test_function_signatures(): + rv = parse('func(a=1) -> int object') + assert rv == '(a=1)' + + rv = parse('func(a=1, [b=None])') + assert rv == '(a=1, [b=None])' + + rv = parse('func(a=1[, b=None])') + assert rv == '(a=1, [b=None])' + + rv = parse("compile(source : string, filename, symbol='file')") + assert rv == "(source : string, filename, symbol='file')" + + rv = parse('func(a=[], [b=None])') + assert rv == '(a=[], [b=None])' + + rv = parse('func(a=[][, b=None])') + assert rv == '(a=[], [b=None])' + + +@pytest.mark.sphinx('dummy', testroot='domain-py') +def test_domain_py_xrefs(app, status, warning): + """Domain objects have correct prefixes when looking up xrefs""" + app.builder.build_all() + + def assert_refnode(node, module_name, class_name, target, reftype=None, + domain='py'): + attributes = { + 'refdomain': domain, + 'reftarget': target, + } + if reftype is not None: + attributes['reftype'] = reftype + if module_name is not False: + attributes['py:module'] = module_name + if class_name is not False: + attributes['py:class'] = class_name + assert_node(node, **attributes) + + doctree = app.env.get_doctree('roles') + refnodes = list(doctree.findall(pending_xref)) + assert_refnode(refnodes[0], None, None, 'TopLevel', 'class') + assert_refnode(refnodes[1], None, None, 'top_level', 'meth') + assert_refnode(refnodes[2], None, 'NestedParentA', 'child_1', 'meth') + assert_refnode(refnodes[3], None, 'NestedParentA', 'NestedChildA.subchild_2', 'meth') + assert_refnode(refnodes[4], None, 'NestedParentA', 'child_2', 'meth') + assert_refnode(refnodes[5], False, 'NestedParentA', 'any_child', domain='') + assert_refnode(refnodes[6], None, 'NestedParentA', 'NestedChildA', 'class') + assert_refnode(refnodes[7], None, 'NestedParentA.NestedChildA', 'subchild_2', 'meth') + assert_refnode(refnodes[8], None, 'NestedParentA.NestedChildA', + 'NestedParentA.child_1', 'meth') + assert_refnode(refnodes[9], None, 'NestedParentA', 'NestedChildA.subchild_1', 'meth') + assert_refnode(refnodes[10], None, 'NestedParentB', 'child_1', 'meth') + assert_refnode(refnodes[11], None, 'NestedParentB', 'NestedParentB', 'class') + 
assert_refnode(refnodes[12], None, None, 'NestedParentA.NestedChildA', 'class') + assert len(refnodes) == 13 + + doctree = app.env.get_doctree('module') + refnodes = list(doctree.findall(pending_xref)) + assert_refnode(refnodes[0], 'module_a.submodule', None, + 'ModTopLevel', 'class') + assert_refnode(refnodes[1], 'module_a.submodule', 'ModTopLevel', + 'mod_child_1', 'meth') + assert_refnode(refnodes[2], 'module_a.submodule', 'ModTopLevel', + 'ModTopLevel.mod_child_1', 'meth') + assert_refnode(refnodes[3], 'module_a.submodule', 'ModTopLevel', + 'mod_child_2', 'meth') + assert_refnode(refnodes[4], 'module_a.submodule', 'ModTopLevel', + 'module_a.submodule.ModTopLevel.mod_child_1', 'meth') + assert_refnode(refnodes[5], 'module_a.submodule', 'ModTopLevel', + 'prop', 'attr') + assert_refnode(refnodes[6], 'module_a.submodule', 'ModTopLevel', + 'prop', 'meth') + assert_refnode(refnodes[7], 'module_b.submodule', None, + 'ModTopLevel', 'class') + assert_refnode(refnodes[8], 'module_b.submodule', 'ModTopLevel', + 'ModNoModule', 'class') + assert_refnode(refnodes[9], False, False, 'int', 'class') + assert_refnode(refnodes[10], False, False, 'tuple', 'class') + assert_refnode(refnodes[11], False, False, 'str', 'class') + assert_refnode(refnodes[12], False, False, 'float', 'class') + assert_refnode(refnodes[13], False, False, 'list', 'class') + assert_refnode(refnodes[14], False, False, 'ModTopLevel', 'class') + assert_refnode(refnodes[15], False, False, 'index', 'doc', domain='std') + assert len(refnodes) == 16 + + doctree = app.env.get_doctree('module_option') + refnodes = list(doctree.findall(pending_xref)) + print(refnodes) + print(refnodes[0]) + print(refnodes[1]) + assert_refnode(refnodes[0], 'test.extra', 'B', 'foo', 'meth') + assert_refnode(refnodes[1], 'test.extra', 'B', 'foo', 'meth') + assert len(refnodes) == 2 + + +@pytest.mark.sphinx('html', testroot='domain-py') +def test_domain_py_xrefs_abbreviations(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'abbr.html').read_text(encoding='utf8') + assert re.search(r'normal: <a .* href="module.html#module_a.submodule.ModTopLevel.' + r'mod_child_1" .*><.*>module_a.submodule.ModTopLevel.mod_child_1\(\)' + r'<.*></a>', + content) + assert re.search(r'relative: <a .* href="module.html#module_a.submodule.ModTopLevel.' + r'mod_child_1" .*><.*>ModTopLevel.mod_child_1\(\)<.*></a>', + content) + assert re.search(r'short name: <a .* href="module.html#module_a.submodule.ModTopLevel.' + r'mod_child_1" .*><.*>mod_child_1\(\)<.*></a>', + content) + assert re.search(r'relative \+ short name: <a .* href="module.html#module_a.submodule.' + r'ModTopLevel.mod_child_1" .*><.*>mod_child_1\(\)<.*></a>', + content) + assert re.search(r'short name \+ relative: <a .* href="module.html#module_a.submodule.' 
+ r'ModTopLevel.mod_child_1" .*><.*>mod_child_1\(\)<.*></a>', + content) + + +@pytest.mark.sphinx('dummy', testroot='domain-py') +def test_domain_py_objects(app, status, warning): + app.builder.build_all() + + modules = app.env.domains['py'].data['modules'] + objects = app.env.domains['py'].data['objects'] + + assert 'module_a.submodule' in modules + assert 'module_a.submodule' in objects + assert 'module_b.submodule' in modules + assert 'module_b.submodule' in objects + + assert objects['module_a.submodule.ModTopLevel'][2] == 'class' + assert objects['module_a.submodule.ModTopLevel.mod_child_1'][2] == 'method' + assert objects['module_a.submodule.ModTopLevel.mod_child_2'][2] == 'method' + assert 'ModTopLevel.ModNoModule' not in objects + assert objects['ModNoModule'][2] == 'class' + assert objects['module_b.submodule.ModTopLevel'][2] == 'class' + + assert objects['TopLevel'][2] == 'class' + assert objects['top_level'][2] == 'method' + assert objects['NestedParentA'][2] == 'class' + assert objects['NestedParentA.child_1'][2] == 'method' + assert objects['NestedParentA.any_child'][2] == 'method' + assert objects['NestedParentA.NestedChildA'][2] == 'class' + assert objects['NestedParentA.NestedChildA.subchild_1'][2] == 'method' + assert objects['NestedParentA.NestedChildA.subchild_2'][2] == 'method' + assert objects['NestedParentA.child_2'][2] == 'method' + assert objects['NestedParentB'][2] == 'class' + assert objects['NestedParentB.child_1'][2] == 'method' + + +@pytest.mark.sphinx('html', testroot='domain-py') +def test_resolve_xref_for_properties(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'module.html').read_text(encoding='utf8') + assert ('Link to <a class="reference internal" href="#module_a.submodule.ModTopLevel.prop"' + ' title="module_a.submodule.ModTopLevel.prop">' + '<code class="xref py py-attr docutils literal notranslate"><span class="pre">' + 'prop</span> <span class="pre">attribute</span></code></a>' in content) + assert ('Link to <a class="reference internal" href="#module_a.submodule.ModTopLevel.prop"' + ' title="module_a.submodule.ModTopLevel.prop">' + '<code class="xref py py-meth docutils literal notranslate"><span class="pre">' + 'prop</span> <span class="pre">method</span></code></a>' in content) + assert ('Link to <a class="reference internal" href="#module_a.submodule.ModTopLevel.prop"' + ' title="module_a.submodule.ModTopLevel.prop">' + '<code class="xref py py-attr docutils literal notranslate"><span class="pre">' + 'prop</span> <span class="pre">attribute</span></code></a>' in content) + + +@pytest.mark.sphinx('dummy', testroot='domain-py') +def test_domain_py_find_obj(app, status, warning): + + def find_obj(modname, prefix, obj_name, obj_type, searchmode=0): + return app.env.domains['py'].find_obj( + app.env, modname, prefix, obj_name, obj_type, searchmode) + + app.builder.build_all() + + assert (find_obj(None, None, 'NONEXISTANT', 'class') == []) + assert (find_obj(None, None, 'NestedParentA', 'class') == + [('NestedParentA', ('roles', 'NestedParentA', 'class', False))]) + assert (find_obj(None, None, 'NestedParentA.NestedChildA', 'class') == + [('NestedParentA.NestedChildA', + ('roles', 'NestedParentA.NestedChildA', 'class', False))]) + assert (find_obj(None, 'NestedParentA', 'NestedChildA', 'class') == + [('NestedParentA.NestedChildA', + ('roles', 'NestedParentA.NestedChildA', 'class', False))]) + assert (find_obj(None, None, 'NestedParentA.NestedChildA.subchild_1', 'meth') == + [('NestedParentA.NestedChildA.subchild_1', + 
('roles', 'NestedParentA.NestedChildA.subchild_1', 'method', False))]) + assert (find_obj(None, 'NestedParentA', 'NestedChildA.subchild_1', 'meth') == + [('NestedParentA.NestedChildA.subchild_1', + ('roles', 'NestedParentA.NestedChildA.subchild_1', 'method', False))]) + assert (find_obj(None, 'NestedParentA.NestedChildA', 'subchild_1', 'meth') == + [('NestedParentA.NestedChildA.subchild_1', + ('roles', 'NestedParentA.NestedChildA.subchild_1', 'method', False))]) + + +@pytest.mark.sphinx('html', testroot='domain-py', freshenv=True) +def test_domain_py_canonical(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'canonical.html').read_text(encoding='utf8') + assert ('<a class="reference internal" href="#canonical.Foo" title="canonical.Foo">' + '<code class="xref py py-class docutils literal notranslate">' + '<span class="pre">Foo</span></code></a>' in content) + assert warning.getvalue() == '' + + +def test_get_full_qualified_name(): + env = Mock(domaindata={}) + domain = PythonDomain(env) + + # non-python references + node = nodes.reference() + assert domain.get_full_qualified_name(node) is None + + # simple reference + node = nodes.reference(reftarget='func') + assert domain.get_full_qualified_name(node) == 'func' + + # with py:module context + kwargs = {'py:module': 'module1'} + node = nodes.reference(reftarget='func', **kwargs) + assert domain.get_full_qualified_name(node) == 'module1.func' + + # with py:class context + kwargs = {'py:class': 'Class'} + node = nodes.reference(reftarget='func', **kwargs) + assert domain.get_full_qualified_name(node) == 'Class.func' + + # with both py:module and py:class context + kwargs = {'py:module': 'module1', 'py:class': 'Class'} + node = nodes.reference(reftarget='func', **kwargs) + assert domain.get_full_qualified_name(node) == 'module1.Class.func' + + +def test_parse_annotation(app): + doctree = _parse_annotation("int", app.env) + assert_node(doctree, ([pending_xref, "int"],)) + assert_node(doctree[0], pending_xref, refdomain="py", reftype="class", reftarget="int") + + doctree = _parse_annotation("List[int]", app.env) + assert_node(doctree, ([pending_xref, "List"], + [desc_sig_punctuation, "["], + [pending_xref, "int"], + [desc_sig_punctuation, "]"])) + + doctree = _parse_annotation("Tuple[int, int]", app.env) + assert_node(doctree, ([pending_xref, "Tuple"], + [desc_sig_punctuation, "["], + [pending_xref, "int"], + [desc_sig_punctuation, ","], + desc_sig_space, + [pending_xref, "int"], + [desc_sig_punctuation, "]"])) + + doctree = _parse_annotation("Tuple[()]", app.env) + assert_node(doctree, ([pending_xref, "Tuple"], + [desc_sig_punctuation, "["], + [desc_sig_punctuation, "("], + [desc_sig_punctuation, ")"], + [desc_sig_punctuation, "]"])) + + doctree = _parse_annotation("Tuple[int, ...]", app.env) + assert_node(doctree, ([pending_xref, "Tuple"], + [desc_sig_punctuation, "["], + [pending_xref, "int"], + [desc_sig_punctuation, ","], + desc_sig_space, + [desc_sig_punctuation, "..."], + [desc_sig_punctuation, "]"])) + + doctree = _parse_annotation("Callable[[int, int], int]", app.env) + assert_node(doctree, ([pending_xref, "Callable"], + [desc_sig_punctuation, "["], + [desc_sig_punctuation, "["], + [pending_xref, "int"], + [desc_sig_punctuation, ","], + desc_sig_space, + [pending_xref, "int"], + [desc_sig_punctuation, "]"], + [desc_sig_punctuation, ","], + desc_sig_space, + [pending_xref, "int"], + [desc_sig_punctuation, "]"])) + + doctree = _parse_annotation("Callable[[], int]", app.env) + assert_node(doctree, 
([pending_xref, "Callable"], + [desc_sig_punctuation, "["], + [desc_sig_punctuation, "["], + [desc_sig_punctuation, "]"], + [desc_sig_punctuation, ","], + desc_sig_space, + [pending_xref, "int"], + [desc_sig_punctuation, "]"])) + + doctree = _parse_annotation("List[None]", app.env) + assert_node(doctree, ([pending_xref, "List"], + [desc_sig_punctuation, "["], + [pending_xref, "None"], + [desc_sig_punctuation, "]"])) + + # None type makes an object-reference (not a class reference) + doctree = _parse_annotation("None", app.env) + assert_node(doctree, ([pending_xref, "None"],)) + assert_node(doctree[0], pending_xref, refdomain="py", reftype="obj", reftarget="None") + + # Literal type makes an object-reference (not a class reference) + doctree = _parse_annotation("typing.Literal['a', 'b']", app.env) + assert_node(doctree, ([pending_xref, "Literal"], + [desc_sig_punctuation, "["], + [desc_sig_literal_string, "'a'"], + [desc_sig_punctuation, ","], + desc_sig_space, + [desc_sig_literal_string, "'b'"], + [desc_sig_punctuation, "]"])) + assert_node(doctree[0], pending_xref, refdomain="py", reftype="obj", reftarget="typing.Literal") + + +def test_parse_annotation_suppress(app): + doctree = _parse_annotation("~typing.Dict[str, str]", app.env) + assert_node(doctree, ([pending_xref, "Dict"], + [desc_sig_punctuation, "["], + [pending_xref, "str"], + [desc_sig_punctuation, ","], + desc_sig_space, + [pending_xref, "str"], + [desc_sig_punctuation, "]"])) + assert_node(doctree[0], pending_xref, refdomain="py", reftype="obj", reftarget="typing.Dict") + + +def test_parse_annotation_Literal(app): + doctree = _parse_annotation("Literal[True, False]", app.env) + assert_node(doctree, ([pending_xref, "Literal"], + [desc_sig_punctuation, "["], + [desc_sig_keyword, "True"], + [desc_sig_punctuation, ","], + desc_sig_space, + [desc_sig_keyword, "False"], + [desc_sig_punctuation, "]"])) + + doctree = _parse_annotation("typing.Literal[0, 1, 'abc']", app.env) + assert_node(doctree, ([pending_xref, "Literal"], + [desc_sig_punctuation, "["], + [desc_sig_literal_number, "0"], + [desc_sig_punctuation, ","], + desc_sig_space, + [desc_sig_literal_number, "1"], + [desc_sig_punctuation, ","], + desc_sig_space, + [desc_sig_literal_string, "'abc'"], + [desc_sig_punctuation, "]"])) + + +def test_pyfunction_signature(app): + text = ".. py:function:: hello(name: str) -> str" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, "hello"], + desc_parameterlist, + [desc_returns, pending_xref, "str"])], + desc_content)])) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="py", objtype="function", no_index=False) + assert_node(doctree[1][0][1], + [desc_parameterlist, desc_parameter, ([desc_sig_name, "name"], + [desc_sig_punctuation, ":"], + desc_sig_space, + [nodes.inline, pending_xref, "str"])]) + + +def test_pyfunction_signature_full(app): + text = (".. 
py:function:: hello(a: str, b = 1, *args: str, " + "c: bool = True, d: tuple = (1, 2), **kwargs: str) -> str") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, "hello"], + desc_parameterlist, + [desc_returns, pending_xref, "str"])], + desc_content)])) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="py", objtype="function", no_index=False) + assert_node(doctree[1][0][1], + [desc_parameterlist, ([desc_parameter, ([desc_sig_name, "a"], + [desc_sig_punctuation, ":"], + desc_sig_space, + [desc_sig_name, pending_xref, "str"])], + [desc_parameter, ([desc_sig_name, "b"], + [desc_sig_operator, "="], + [nodes.inline, "1"])], + [desc_parameter, ([desc_sig_operator, "*"], + [desc_sig_name, "args"], + [desc_sig_punctuation, ":"], + desc_sig_space, + [desc_sig_name, pending_xref, "str"])], + [desc_parameter, ([desc_sig_name, "c"], + [desc_sig_punctuation, ":"], + desc_sig_space, + [desc_sig_name, pending_xref, "bool"], + desc_sig_space, + [desc_sig_operator, "="], + desc_sig_space, + [nodes.inline, "True"])], + [desc_parameter, ([desc_sig_name, "d"], + [desc_sig_punctuation, ":"], + desc_sig_space, + [desc_sig_name, pending_xref, "tuple"], + desc_sig_space, + [desc_sig_operator, "="], + desc_sig_space, + [nodes.inline, "(1, 2)"])], + [desc_parameter, ([desc_sig_operator, "**"], + [desc_sig_name, "kwargs"], + [desc_sig_punctuation, ":"], + desc_sig_space, + [desc_sig_name, pending_xref, "str"])])]) + # case: separator at head + text = ".. py:function:: hello(*, a)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree[1][0][1], + [desc_parameterlist, ([desc_parameter, nodes.inline, "*"], + [desc_parameter, desc_sig_name, "a"])]) + + # case: separator in the middle + text = ".. py:function:: hello(a, /, b, *, c)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree[1][0][1], + [desc_parameterlist, ([desc_parameter, desc_sig_name, "a"], + [desc_parameter, desc_sig_operator, "/"], + [desc_parameter, desc_sig_name, "b"], + [desc_parameter, desc_sig_operator, "*"], + [desc_parameter, desc_sig_name, "c"])]) + + # case: separator in the middle (2) + text = ".. py:function:: hello(a, /, *, b)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree[1][0][1], + [desc_parameterlist, ([desc_parameter, desc_sig_name, "a"], + [desc_parameter, desc_sig_operator, "/"], + [desc_parameter, desc_sig_operator, "*"], + [desc_parameter, desc_sig_name, "b"])]) + + # case: separator at tail + text = ".. py:function:: hello(a, /)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree[1][0][1], + [desc_parameterlist, ([desc_parameter, desc_sig_name, "a"], + [desc_parameter, desc_sig_operator, "/"])]) + + +def test_pyfunction_with_unary_operators(app): + text = ".. py:function:: menu(egg=+1, bacon=-1, sausage=~1, spam=not spam)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree[1][0][1], + [desc_parameterlist, ([desc_parameter, ([desc_sig_name, "egg"], + [desc_sig_operator, "="], + [nodes.inline, "+1"])], + [desc_parameter, ([desc_sig_name, "bacon"], + [desc_sig_operator, "="], + [nodes.inline, "-1"])], + [desc_parameter, ([desc_sig_name, "sausage"], + [desc_sig_operator, "="], + [nodes.inline, "~1"])], + [desc_parameter, ([desc_sig_name, "spam"], + [desc_sig_operator, "="], + [nodes.inline, "not spam"])])]) + + +def test_pyfunction_with_binary_operators(app): + text = ".. 
py:function:: menu(spam=2**64)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree[1][0][1], + [desc_parameterlist, ([desc_parameter, ([desc_sig_name, "spam"], + [desc_sig_operator, "="], + [nodes.inline, "2**64"])])]) + + +def test_pyfunction_with_number_literals(app): + text = ".. py:function:: hello(age=0x10, height=1_6_0)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree[1][0][1], + [desc_parameterlist, ([desc_parameter, ([desc_sig_name, "age"], + [desc_sig_operator, "="], + [nodes.inline, "0x10"])], + [desc_parameter, ([desc_sig_name, "height"], + [desc_sig_operator, "="], + [nodes.inline, "1_6_0"])])]) + + +def test_pyfunction_with_union_type_operator(app): + text = ".. py:function:: hello(age: int | None)" + doctree = restructuredtext.parse(app, text) + assert_node(doctree[1][0][1], + [desc_parameterlist, ([desc_parameter, ([desc_sig_name, "age"], + [desc_sig_punctuation, ":"], + desc_sig_space, + [desc_sig_name, ([pending_xref, "int"], + desc_sig_space, + [desc_sig_punctuation, "|"], + desc_sig_space, + [pending_xref, "None"])])])]) + + +def test_optional_pyfunction_signature(app): + text = ".. py:function:: compile(source [, filename [, symbol]]) -> ast object" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, "compile"], + desc_parameterlist, + [desc_returns, pending_xref, "ast object"])], + desc_content)])) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="py", objtype="function", no_index=False) + assert_node(doctree[1][0][1], + ([desc_parameter, ([desc_sig_name, "source"])], + [desc_optional, ([desc_parameter, ([desc_sig_name, "filename"])], + [desc_optional, desc_parameter, ([desc_sig_name, "symbol"])])])) + + +def test_pyexception_signature(app): + text = ".. py:exception:: builtins.IOError" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_annotation, ('exception', desc_sig_space)], + [desc_addname, "builtins."], + [desc_name, "IOError"])], + desc_content)])) + assert_node(doctree[1], desc, desctype="exception", + domain="py", objtype="exception", no_index=False) + + +def test_pydata_signature(app): + text = (".. py:data:: version\n" + " :type: int\n" + " :value: 1\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, "version"], + [desc_annotation, ([desc_sig_punctuation, ':'], + desc_sig_space, + [pending_xref, "int"])], + [desc_annotation, ( + desc_sig_space, + [desc_sig_punctuation, '='], + desc_sig_space, + "1")], + )], + desc_content)])) + assert_node(doctree[1], addnodes.desc, desctype="data", + domain="py", objtype="data", no_index=False) + + +def test_pydata_signature_old(app): + text = (".. py:data:: version\n" + " :annotation: = 1\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, "version"], + [desc_annotation, (desc_sig_space, + "= 1")])], + desc_content)])) + assert_node(doctree[1], addnodes.desc, desctype="data", + domain="py", objtype="data", no_index=False) + + +def test_pydata_with_union_type_operator(app): + text = (".. 
py:data:: version\n" + " :type: int | str") + doctree = restructuredtext.parse(app, text) + assert_node(doctree[1][0], + ([desc_name, "version"], + [desc_annotation, ([desc_sig_punctuation, ':'], + desc_sig_space, + [pending_xref, "int"], + desc_sig_space, + [desc_sig_punctuation, "|"], + desc_sig_space, + [pending_xref, "str"])])) + + +def test_pyobject_prefix(app): + text = (".. py:class:: Foo\n" + "\n" + " .. py:method:: Foo.say\n" + " .. py:method:: FooBar.say") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_annotation, ('class', desc_sig_space)], + [desc_name, "Foo"])], + [desc_content, (addnodes.index, + desc, + addnodes.index, + desc)])])) + assert doctree[1][1][1].astext().strip() == 'say()' # prefix is stripped + assert doctree[1][1][3].astext().strip() == 'FooBar.say()' # not stripped + + +def test_pydata(app): + text = (".. py:module:: example\n" + ".. py:data:: var\n" + " :type: int\n") + domain = app.env.get_domain('py') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + addnodes.index, + nodes.target, + [desc, ([desc_signature, ([desc_addname, "example."], + [desc_name, "var"], + [desc_annotation, ([desc_sig_punctuation, ':'], + desc_sig_space, + [pending_xref, "int"])])], + [desc_content, ()])])) + assert_node(doctree[3][0][2][2], pending_xref, **{"py:module": "example"}) + assert 'example.var' in domain.objects + assert domain.objects['example.var'] == ('index', 'example.var', 'data', False) + + +def test_pyfunction(app): + text = (".. py:function:: func1\n" + ".. py:module:: example\n" + ".. py:function:: func2\n" + " :async:\n") + domain = app.env.get_domain('py') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, "func1"], + [desc_parameterlist, ()])], + [desc_content, ()])], + addnodes.index, + addnodes.index, + nodes.target, + [desc, ([desc_signature, ([desc_annotation, ([desc_sig_keyword, 'async'], + desc_sig_space)], + [desc_addname, "example."], + [desc_name, "func2"], + [desc_parameterlist, ()])], + [desc_content, ()])])) + assert_node(doctree[0], addnodes.index, + entries=[('pair', 'built-in function; func1()', 'func1', '', None)]) + assert_node(doctree[2], addnodes.index, + entries=[('pair', 'module; example', 'module-example', '', None)]) + assert_node(doctree[3], addnodes.index, + entries=[('single', 'func2() (in module example)', 'example.func2', '', None)]) + + assert 'func1' in domain.objects + assert domain.objects['func1'] == ('index', 'func1', 'function', False) + assert 'example.func2' in domain.objects + assert domain.objects['example.func2'] == ('index', 'example.func2', 'function', False) + + +def test_pyclass_options(app): + text = (".. py:class:: Class1\n" + ".. 
py:class:: Class2\n" + " :final:\n") + domain = app.env.get_domain('py') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_annotation, ("class", desc_sig_space)], + [desc_name, "Class1"])], + [desc_content, ()])], + addnodes.index, + [desc, ([desc_signature, ([desc_annotation, ("final", + desc_sig_space, + "class", + desc_sig_space)], + [desc_name, "Class2"])], + [desc_content, ()])])) + + # class + assert_node(doctree[0], addnodes.index, + entries=[('single', 'Class1 (built-in class)', 'Class1', '', None)]) + assert 'Class1' in domain.objects + assert domain.objects['Class1'] == ('index', 'Class1', 'class', False) + + # :final: + assert_node(doctree[2], addnodes.index, + entries=[('single', 'Class2 (built-in class)', 'Class2', '', None)]) + assert 'Class2' in domain.objects + assert domain.objects['Class2'] == ('index', 'Class2', 'class', False) + + +def test_pymethod_options(app): + text = (".. py:class:: Class\n" + "\n" + " .. py:method:: meth1\n" + " .. py:method:: meth2\n" + " :classmethod:\n" + " .. py:method:: meth3\n" + " :staticmethod:\n" + " .. py:method:: meth4\n" + " :async:\n" + " .. py:method:: meth5\n" + " :abstractmethod:\n" + " .. py:method:: meth6\n" + " :final:\n") + domain = app.env.get_domain('py') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_annotation, ("class", desc_sig_space)], + [desc_name, "Class"])], + [desc_content, (addnodes.index, + desc, + addnodes.index, + desc, + addnodes.index, + desc, + addnodes.index, + desc, + addnodes.index, + desc, + addnodes.index, + desc)])])) + + # method + assert_node(doctree[1][1][0], addnodes.index, + entries=[('single', 'meth1() (Class method)', 'Class.meth1', '', None)]) + assert_node(doctree[1][1][1], ([desc_signature, ([desc_name, "meth1"], + [desc_parameterlist, ()])], + [desc_content, ()])) + assert 'Class.meth1' in domain.objects + assert domain.objects['Class.meth1'] == ('index', 'Class.meth1', 'method', False) + + # :classmethod: + assert_node(doctree[1][1][2], addnodes.index, + entries=[('single', 'meth2() (Class class method)', 'Class.meth2', '', None)]) + assert_node(doctree[1][1][3], ([desc_signature, ([desc_annotation, ("classmethod", desc_sig_space)], + [desc_name, "meth2"], + [desc_parameterlist, ()])], + [desc_content, ()])) + assert 'Class.meth2' in domain.objects + assert domain.objects['Class.meth2'] == ('index', 'Class.meth2', 'method', False) + + # :staticmethod: + assert_node(doctree[1][1][4], addnodes.index, + entries=[('single', 'meth3() (Class static method)', 'Class.meth3', '', None)]) + assert_node(doctree[1][1][5], ([desc_signature, ([desc_annotation, ("static", desc_sig_space)], + [desc_name, "meth3"], + [desc_parameterlist, ()])], + [desc_content, ()])) + assert 'Class.meth3' in domain.objects + assert domain.objects['Class.meth3'] == ('index', 'Class.meth3', 'method', False) + + # :async: + assert_node(doctree[1][1][6], addnodes.index, + entries=[('single', 'meth4() (Class method)', 'Class.meth4', '', None)]) + assert_node(doctree[1][1][7], ([desc_signature, ([desc_annotation, ("async", desc_sig_space)], + [desc_name, "meth4"], + [desc_parameterlist, ()])], + [desc_content, ()])) + assert 'Class.meth4' in domain.objects + assert domain.objects['Class.meth4'] == ('index', 'Class.meth4', 'method', False) + + # :abstractmethod: + assert_node(doctree[1][1][8], addnodes.index, + entries=[('single', 'meth5() (Class method)', 'Class.meth5', '', None)]) + 
assert_node(doctree[1][1][9], ([desc_signature, ([desc_annotation, ("abstract", desc_sig_space)], + [desc_name, "meth5"], + [desc_parameterlist, ()])], + [desc_content, ()])) + assert 'Class.meth5' in domain.objects + assert domain.objects['Class.meth5'] == ('index', 'Class.meth5', 'method', False) + + # :final: + assert_node(doctree[1][1][10], addnodes.index, + entries=[('single', 'meth6() (Class method)', 'Class.meth6', '', None)]) + assert_node(doctree[1][1][11], ([desc_signature, ([desc_annotation, ("final", desc_sig_space)], + [desc_name, "meth6"], + [desc_parameterlist, ()])], + [desc_content, ()])) + assert 'Class.meth6' in domain.objects + assert domain.objects['Class.meth6'] == ('index', 'Class.meth6', 'method', False) + + +def test_pyclassmethod(app): + text = (".. py:class:: Class\n" + "\n" + " .. py:classmethod:: meth\n") + domain = app.env.get_domain('py') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_annotation, ("class", desc_sig_space)], + [desc_name, "Class"])], + [desc_content, (addnodes.index, + desc)])])) + assert_node(doctree[1][1][0], addnodes.index, + entries=[('single', 'meth() (Class class method)', 'Class.meth', '', None)]) + assert_node(doctree[1][1][1], ([desc_signature, ([desc_annotation, ("classmethod", desc_sig_space)], + [desc_name, "meth"], + [desc_parameterlist, ()])], + [desc_content, ()])) + assert 'Class.meth' in domain.objects + assert domain.objects['Class.meth'] == ('index', 'Class.meth', 'method', False) + + +def test_pystaticmethod(app): + text = (".. py:class:: Class\n" + "\n" + " .. py:staticmethod:: meth\n") + domain = app.env.get_domain('py') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_annotation, ("class", desc_sig_space)], + [desc_name, "Class"])], + [desc_content, (addnodes.index, + desc)])])) + assert_node(doctree[1][1][0], addnodes.index, + entries=[('single', 'meth() (Class static method)', 'Class.meth', '', None)]) + assert_node(doctree[1][1][1], ([desc_signature, ([desc_annotation, ("static", desc_sig_space)], + [desc_name, "meth"], + [desc_parameterlist, ()])], + [desc_content, ()])) + assert 'Class.meth' in domain.objects + assert domain.objects['Class.meth'] == ('index', 'Class.meth', 'method', False) + + +def test_pyattribute(app): + text = (".. py:class:: Class\n" + "\n" + " .. 
py:attribute:: attr\n" + " :type: Optional[str]\n" + " :value: ''\n") + domain = app.env.get_domain('py') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_annotation, ("class", desc_sig_space)], + [desc_name, "Class"])], + [desc_content, (addnodes.index, + desc)])])) + assert_node(doctree[1][1][0], addnodes.index, + entries=[('single', 'attr (Class attribute)', 'Class.attr', '', None)]) + assert_node(doctree[1][1][1], ([desc_signature, ([desc_name, "attr"], + [desc_annotation, ([desc_sig_punctuation, ':'], + desc_sig_space, + [pending_xref, "str"], + desc_sig_space, + [desc_sig_punctuation, "|"], + desc_sig_space, + [pending_xref, "None"])], + [desc_annotation, (desc_sig_space, + [desc_sig_punctuation, '='], + desc_sig_space, + "''")], + )], + [desc_content, ()])) + assert_node(doctree[1][1][1][0][1][2], pending_xref, **{"py:class": "Class"}) + assert_node(doctree[1][1][1][0][1][6], pending_xref, **{"py:class": "Class"}) + assert 'Class.attr' in domain.objects + assert domain.objects['Class.attr'] == ('index', 'Class.attr', 'attribute', False) + + +def test_pyproperty(app): + text = (".. py:class:: Class\n" + "\n" + " .. py:property:: prop1\n" + " :abstractmethod:\n" + " :type: str\n" + "\n" + " .. py:property:: prop2\n" + " :classmethod:\n" + " :type: str\n") + domain = app.env.get_domain('py') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_annotation, ("class", desc_sig_space)], + [desc_name, "Class"])], + [desc_content, (addnodes.index, + desc, + addnodes.index, + desc)])])) + assert_node(doctree[1][1][0], addnodes.index, + entries=[('single', 'prop1 (Class property)', 'Class.prop1', '', None)]) + assert_node(doctree[1][1][1], ([desc_signature, ([desc_annotation, ("abstract", desc_sig_space, + "property", desc_sig_space)], + [desc_name, "prop1"], + [desc_annotation, ([desc_sig_punctuation, ':'], + desc_sig_space, + [pending_xref, "str"])])], + [desc_content, ()])) + assert_node(doctree[1][1][2], addnodes.index, + entries=[('single', 'prop2 (Class property)', 'Class.prop2', '', None)]) + assert_node(doctree[1][1][3], ([desc_signature, ([desc_annotation, ("class", desc_sig_space, + "property", desc_sig_space)], + [desc_name, "prop2"], + [desc_annotation, ([desc_sig_punctuation, ':'], + desc_sig_space, + [pending_xref, "str"])])], + [desc_content, ()])) + assert 'Class.prop1' in domain.objects + assert domain.objects['Class.prop1'] == ('index', 'Class.prop1', 'property', False) + assert 'Class.prop2' in domain.objects + assert domain.objects['Class.prop2'] == ('index', 'Class.prop2', 'property', False) + + +def test_pydecorator_signature(app): + text = ".. py:decorator:: deco" + domain = app.env.get_domain('py') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_addname, "@"], + [desc_name, "deco"])], + desc_content)])) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="py", objtype="function", no_index=False) + + assert 'deco' in domain.objects + assert domain.objects['deco'] == ('index', 'deco', 'function', False) + + +def test_pydecoratormethod_signature(app): + text = ".. 
py:decoratormethod:: deco" + domain = app.env.get_domain('py') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_addname, "@"], + [desc_name, "deco"])], + desc_content)])) + assert_node(doctree[1], addnodes.desc, desctype="method", + domain="py", objtype="method", no_index=False) + + assert 'deco' in domain.objects + assert domain.objects['deco'] == ('index', 'deco', 'method', False) + + +def test_canonical(app): + text = (".. py:class:: io.StringIO\n" + " :canonical: _io.StringIO") + domain = app.env.get_domain('py') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_annotation, ("class", desc_sig_space)], + [desc_addname, "io."], + [desc_name, "StringIO"])], + desc_content)])) + assert 'io.StringIO' in domain.objects + assert domain.objects['io.StringIO'] == ('index', 'io.StringIO', 'class', False) + assert domain.objects['_io.StringIO'] == ('index', 'io.StringIO', 'class', True) + + +def test_canonical_definition_overrides(app, warning): + text = (".. py:class:: io.StringIO\n" + " :canonical: _io.StringIO\n" + ".. py:class:: _io.StringIO\n") + restructuredtext.parse(app, text) + assert warning.getvalue() == "" + + domain = app.env.get_domain('py') + assert domain.objects['_io.StringIO'] == ('index', 'id0', 'class', False) + + +def test_canonical_definition_skip(app, warning): + text = (".. py:class:: _io.StringIO\n" + ".. py:class:: io.StringIO\n" + " :canonical: _io.StringIO\n") + + restructuredtext.parse(app, text) + assert warning.getvalue() == "" + + domain = app.env.get_domain('py') + assert domain.objects['_io.StringIO'] == ('index', 'io.StringIO', 'class', False) + + +def test_canonical_duplicated(app, warning): + text = (".. py:class:: mypackage.StringIO\n" + " :canonical: _io.StringIO\n" + ".. py:class:: io.StringIO\n" + " :canonical: _io.StringIO\n") + + restructuredtext.parse(app, text) + assert warning.getvalue() != "" + + +def test_info_field_list(app): + text = (".. py:module:: example\n" + ".. 
py:class:: Class\n" + "\n" + " :meta blah: this meta-field must not show up in the toc-tree\n" + " :param str name: blah blah\n" + " :meta another meta field:\n" + " :param age: blah blah\n" + " :type age: int\n" + " :param items: blah blah\n" + " :type items: Tuple[str, ...]\n" + " :param Dict[str, str] params: blah blah\n") + doctree = restructuredtext.parse(app, text) + print(doctree) + + assert_node(doctree, (addnodes.index, + addnodes.index, + nodes.target, + [desc, ([desc_signature, ([desc_annotation, ("class", desc_sig_space)], + [desc_addname, "example."], + [desc_name, "Class"])], + [desc_content, nodes.field_list, nodes.field])])) + assert_node(doctree[3][1][0][0], + ([nodes.field_name, "Parameters"], + [nodes.field_body, nodes.bullet_list, ([nodes.list_item, nodes.paragraph], + [nodes.list_item, nodes.paragraph], + [nodes.list_item, nodes.paragraph], + [nodes.list_item, nodes.paragraph])])) + + # :param str name: + assert_node(doctree[3][1][0][0][1][0][0][0], + ([addnodes.literal_strong, "name"], + " (", + [pending_xref, addnodes.literal_emphasis, "str"], + ")", + " -- ", + "blah blah")) + assert_node(doctree[3][1][0][0][1][0][0][0][2], pending_xref, + refdomain="py", reftype="class", reftarget="str", + **{"py:module": "example", "py:class": "Class"}) + + # :param age: + :type age: + assert_node(doctree[3][1][0][0][1][0][1][0], + ([addnodes.literal_strong, "age"], + " (", + [pending_xref, addnodes.literal_emphasis, "int"], + ")", + " -- ", + "blah blah")) + assert_node(doctree[3][1][0][0][1][0][1][0][2], pending_xref, + refdomain="py", reftype="class", reftarget="int", + **{"py:module": "example", "py:class": "Class"}) + + # :param items: + :type items: + assert_node(doctree[3][1][0][0][1][0][2][0], + ([addnodes.literal_strong, "items"], + " (", + [pending_xref, addnodes.literal_emphasis, "Tuple"], + [addnodes.literal_emphasis, "["], + [pending_xref, addnodes.literal_emphasis, "str"], + [addnodes.literal_emphasis, ", "], + [addnodes.literal_emphasis, "..."], + [addnodes.literal_emphasis, "]"], + ")", + " -- ", + "blah blah")) + assert_node(doctree[3][1][0][0][1][0][2][0][2], pending_xref, + refdomain="py", reftype="class", reftarget="Tuple", + **{"py:module": "example", "py:class": "Class"}) + assert_node(doctree[3][1][0][0][1][0][2][0][4], pending_xref, + refdomain="py", reftype="class", reftarget="str", + **{"py:module": "example", "py:class": "Class"}) + + # :param Dict[str, str] params: + assert_node(doctree[3][1][0][0][1][0][3][0], + ([addnodes.literal_strong, "params"], + " (", + [pending_xref, addnodes.literal_emphasis, "Dict"], + [addnodes.literal_emphasis, "["], + [pending_xref, addnodes.literal_emphasis, "str"], + [addnodes.literal_emphasis, ", "], + [pending_xref, addnodes.literal_emphasis, "str"], + [addnodes.literal_emphasis, "]"], + ")", + " -- ", + "blah blah")) + assert_node(doctree[3][1][0][0][1][0][3][0][2], pending_xref, + refdomain="py", reftype="class", reftarget="Dict", + **{"py:module": "example", "py:class": "Class"}) + assert_node(doctree[3][1][0][0][1][0][3][0][4], pending_xref, + refdomain="py", reftype="class", reftarget="str", + **{"py:module": "example", "py:class": "Class"}) + assert_node(doctree[3][1][0][0][1][0][3][0][6], pending_xref, + refdomain="py", reftype="class", reftarget="str", + **{"py:module": "example", "py:class": "Class"}) + + +def test_info_field_list_piped_type(app): + text = (".. py:module:: example\n" + ".. 
py:class:: Class\n" + "\n" + " :param age: blah blah\n" + " :type age: int | str\n") + doctree = restructuredtext.parse(app, text) + + assert_node(doctree, + (addnodes.index, + addnodes.index, + nodes.target, + [desc, ([desc_signature, ([desc_annotation, ("class", desc_sig_space)], + [desc_addname, "example."], + [desc_name, "Class"])], + [desc_content, nodes.field_list, nodes.field, (nodes.field_name, + nodes.field_body)])])) + assert_node(doctree[3][1][0][0][1], + ([nodes.paragraph, ([addnodes.literal_strong, "age"], + " (", + [pending_xref, addnodes.literal_emphasis, "int"], + [addnodes.literal_emphasis, " | "], + [pending_xref, addnodes.literal_emphasis, "str"], + ")", + " -- ", + "blah blah")],)) + assert_node(doctree[3][1][0][0][1][0][2], pending_xref, + refdomain="py", reftype="class", reftarget="int", + **{"py:module": "example", "py:class": "Class"}) + assert_node(doctree[3][1][0][0][1][0][4], pending_xref, + refdomain="py", reftype="class", reftarget="str", + **{"py:module": "example", "py:class": "Class"}) + + +def test_info_field_list_Literal(app): + text = (".. py:module:: example\n" + ".. py:class:: Class\n" + "\n" + " :param age: blah blah\n" + " :type age: Literal['foo', 'bar', 'baz']\n") + doctree = restructuredtext.parse(app, text) + + assert_node(doctree, + (addnodes.index, + addnodes.index, + nodes.target, + [desc, ([desc_signature, ([desc_annotation, ("class", desc_sig_space)], + [desc_addname, "example."], + [desc_name, "Class"])], + [desc_content, nodes.field_list, nodes.field, (nodes.field_name, + nodes.field_body)])])) + assert_node(doctree[3][1][0][0][1], + ([nodes.paragraph, ([addnodes.literal_strong, "age"], + " (", + [pending_xref, addnodes.literal_emphasis, "Literal"], + [addnodes.literal_emphasis, "["], + [addnodes.literal_emphasis, "'foo'"], + [addnodes.literal_emphasis, ", "], + [addnodes.literal_emphasis, "'bar'"], + [addnodes.literal_emphasis, ", "], + [addnodes.literal_emphasis, "'baz'"], + [addnodes.literal_emphasis, "]"], + ")", + " -- ", + "blah blah")],)) + assert_node(doctree[3][1][0][0][1][0][2], pending_xref, + refdomain="py", reftype="class", reftarget="Literal", + **{"py:module": "example", "py:class": "Class"}) + + +def test_info_field_list_var(app): + text = (".. py:class:: Class\n" + "\n" + " :var int attr: blah blah\n") + doctree = restructuredtext.parse(app, text) + + assert_node(doctree, (addnodes.index, + [desc, (desc_signature, + [desc_content, nodes.field_list, nodes.field])])) + assert_node(doctree[1][1][0][0], ([nodes.field_name, "Variables"], + [nodes.field_body, nodes.paragraph])) + + # :var int attr: + assert_node(doctree[1][1][0][0][1][0], + ([addnodes.literal_strong, "attr"], + " (", + [pending_xref, addnodes.literal_emphasis, "int"], + ")", + " -- ", + "blah blah")) + assert_node(doctree[1][1][0][0][1][0][2], pending_xref, + refdomain="py", reftype="class", reftarget="int", **{"py:class": "Class"}) + + +def test_info_field_list_napoleon_deliminator_of(app): + text = (".. py:module:: example\n" + ".. 
py:class:: Class\n" + "\n" + " :param list_str_var: example description.\n" + " :type list_str_var: list of str\n" + " :param tuple_int_var: example description.\n" + " :type tuple_int_var: tuple of tuple of int\n" + ) + doctree = restructuredtext.parse(app, text) + + # :param list of str list_str_var: + assert_node(doctree[3][1][0][0][1][0][0][0], + ([addnodes.literal_strong, "list_str_var"], + " (", + [pending_xref, addnodes.literal_emphasis, "list"], + [addnodes.literal_emphasis, " of "], + [pending_xref, addnodes.literal_emphasis, "str"], + ")", + " -- ", + "example description.")) + + # :param tuple of tuple of int tuple_int_var: + assert_node(doctree[3][1][0][0][1][0][1][0], + ([addnodes.literal_strong, "tuple_int_var"], + " (", + [pending_xref, addnodes.literal_emphasis, "tuple"], + [addnodes.literal_emphasis, " of "], + [pending_xref, addnodes.literal_emphasis, "tuple"], + [addnodes.literal_emphasis, " of "], + [pending_xref, addnodes.literal_emphasis, "int"], + ")", + " -- ", + "example description.")) + + +def test_info_field_list_napoleon_deliminator_or(app): + text = (".. py:module:: example\n" + ".. py:class:: Class\n" + "\n" + " :param bool_str_var: example description.\n" + " :type bool_str_var: bool or str\n" + " :param str_float_int_var: example description.\n" + " :type str_float_int_var: str or float or int\n" + ) + doctree = restructuredtext.parse(app, text) + + # :param bool or str bool_str_var: + assert_node(doctree[3][1][0][0][1][0][0][0], + ([addnodes.literal_strong, "bool_str_var"], + " (", + [pending_xref, addnodes.literal_emphasis, "bool"], + [addnodes.literal_emphasis, " or "], + [pending_xref, addnodes.literal_emphasis, "str"], + ")", + " -- ", + "example description.")) + + # :param str or float or int str_float_int_var: + assert_node(doctree[3][1][0][0][1][0][1][0], + ([addnodes.literal_strong, "str_float_int_var"], + " (", + [pending_xref, addnodes.literal_emphasis, "str"], + [addnodes.literal_emphasis, " or "], + [pending_xref, addnodes.literal_emphasis, "float"], + [addnodes.literal_emphasis, " or "], + [pending_xref, addnodes.literal_emphasis, "int"], + ")", + " -- ", + "example description.")) + + +def test_type_field(app): + text = (".. py:data:: var1\n" + " :type: .int\n" + ".. py:data:: var2\n" + " :type: ~builtins.int\n" + ".. 
py:data:: var3\n" + " :type: typing.Optional[typing.Tuple[int, typing.Any]]\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, "var1"], + [desc_annotation, ([desc_sig_punctuation, ':'], + desc_sig_space, + [pending_xref, "int"])])], + [desc_content, ()])], + addnodes.index, + [desc, ([desc_signature, ([desc_name, "var2"], + [desc_annotation, ([desc_sig_punctuation, ':'], + desc_sig_space, + [pending_xref, "int"])])], + [desc_content, ()])], + addnodes.index, + [desc, ([desc_signature, ([desc_name, "var3"], + [desc_annotation, ([desc_sig_punctuation, ":"], + desc_sig_space, + [pending_xref, "Optional"], + [desc_sig_punctuation, "["], + [pending_xref, "Tuple"], + [desc_sig_punctuation, "["], + [pending_xref, "int"], + [desc_sig_punctuation, ","], + desc_sig_space, + [pending_xref, "Any"], + [desc_sig_punctuation, "]"], + [desc_sig_punctuation, "]"])])], + [desc_content, ()])])) + assert_node(doctree[1][0][1][2], pending_xref, reftarget='int', refspecific=True) + assert_node(doctree[3][0][1][2], pending_xref, reftarget='builtins.int', refspecific=False) + assert_node(doctree[5][0][1][2], pending_xref, reftarget='typing.Optional', refspecific=False) + assert_node(doctree[5][0][1][4], pending_xref, reftarget='typing.Tuple', refspecific=False) + assert_node(doctree[5][0][1][6], pending_xref, reftarget='int', refspecific=False) + assert_node(doctree[5][0][1][9], pending_xref, reftarget='typing.Any', refspecific=False) + + +@pytest.mark.sphinx(freshenv=True) +def test_module_index(app): + text = (".. py:module:: docutils\n" + ".. py:module:: sphinx\n" + ".. py:module:: sphinx.config\n" + ".. py:module:: sphinx.builders\n" + ".. py:module:: sphinx.builders.html\n" + ".. py:module:: sphinx_intl\n") + restructuredtext.parse(app, text) + index = PythonModuleIndex(app.env.get_domain('py')) + assert index.generate() == ( + [('d', [IndexEntry('docutils', 0, 'index', 'module-docutils', '', '', '')]), + ('s', [IndexEntry('sphinx', 1, 'index', 'module-sphinx', '', '', ''), + IndexEntry('sphinx.builders', 2, 'index', 'module-sphinx.builders', '', '', ''), + IndexEntry('sphinx.builders.html', 2, 'index', 'module-sphinx.builders.html', '', '', ''), + IndexEntry('sphinx.config', 2, 'index', 'module-sphinx.config', '', '', ''), + IndexEntry('sphinx_intl', 0, 'index', 'module-sphinx_intl', '', '', '')])], + False, + ) + + +@pytest.mark.sphinx(freshenv=True) +def test_module_index_submodule(app): + text = ".. py:module:: sphinx.config\n" + restructuredtext.parse(app, text) + index = PythonModuleIndex(app.env.get_domain('py')) + assert index.generate() == ( + [('s', [IndexEntry('sphinx', 1, '', '', '', '', ''), + IndexEntry('sphinx.config', 2, 'index', 'module-sphinx.config', '', '', '')])], + False, + ) + + +@pytest.mark.sphinx(freshenv=True) +def test_module_index_not_collapsed(app): + text = (".. py:module:: docutils\n" + ".. py:module:: sphinx\n") + restructuredtext.parse(app, text) + index = PythonModuleIndex(app.env.get_domain('py')) + assert index.generate() == ( + [('d', [IndexEntry('docutils', 0, 'index', 'module-docutils', '', '', '')]), + ('s', [IndexEntry('sphinx', 0, 'index', 'module-sphinx', '', '', '')])], + True, + ) + + +@pytest.mark.sphinx(freshenv=True, confoverrides={'modindex_common_prefix': ['sphinx.']}) +def test_modindex_common_prefix(app): + text = (".. py:module:: docutils\n" + ".. py:module:: sphinx\n" + ".. py:module:: sphinx.config\n" + ".. py:module:: sphinx.builders\n" + ".. 
py:module:: sphinx.builders.html\n" + ".. py:module:: sphinx_intl\n") + restructuredtext.parse(app, text) + index = PythonModuleIndex(app.env.get_domain('py')) + assert index.generate() == ( + [('b', [IndexEntry('sphinx.builders', 1, 'index', 'module-sphinx.builders', '', '', ''), + IndexEntry('sphinx.builders.html', 2, 'index', 'module-sphinx.builders.html', '', '', '')]), + ('c', [IndexEntry('sphinx.config', 0, 'index', 'module-sphinx.config', '', '', '')]), + ('d', [IndexEntry('docutils', 0, 'index', 'module-docutils', '', '', '')]), + ('s', [IndexEntry('sphinx', 0, 'index', 'module-sphinx', '', '', ''), + IndexEntry('sphinx_intl', 0, 'index', 'module-sphinx_intl', '', '', '')])], + True, + ) + + +def test_no_index_entry(app): + text = (".. py:function:: f()\n" + ".. py:function:: g()\n" + " :no-index-entry:\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, desc, addnodes.index, desc)) + assert_node(doctree[0], addnodes.index, entries=[('pair', 'built-in function; f()', 'f', '', None)]) + assert_node(doctree[2], addnodes.index, entries=[]) + + text = (".. py:class:: f\n" + ".. py:class:: g\n" + " :no-index-entry:\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, desc, addnodes.index, desc)) + assert_node(doctree[0], addnodes.index, entries=[('single', 'f (built-in class)', 'f', '', None)]) + assert_node(doctree[2], addnodes.index, entries=[]) + + +@pytest.mark.sphinx('html', testroot='domain-py-python_use_unqualified_type_names') +def test_python_python_use_unqualified_type_names(app, status, warning): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<span class="n"><a class="reference internal" href="#foo.Name" title="foo.Name">' + '<span class="pre">Name</span></a></span>' in content) + assert '<span class="n"><span class="pre">foo.Age</span></span>' in content + assert ('<p><strong>name</strong> (<a class="reference internal" href="#foo.Name" ' + 'title="foo.Name"><em>Name</em></a>) – blah blah</p>' in content) + assert '<p><strong>age</strong> (<em>foo.Age</em>) – blah blah</p>' in content + + +@pytest.mark.sphinx('html', testroot='domain-py-python_use_unqualified_type_names', + confoverrides={'python_use_unqualified_type_names': False}) +def test_python_python_use_unqualified_type_names_disabled(app, status, warning): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<span class="n"><a class="reference internal" href="#foo.Name" title="foo.Name">' + '<span class="pre">foo.Name</span></a></span>' in content) + assert '<span class="n"><span class="pre">foo.Age</span></span>' in content + assert ('<p><strong>name</strong> (<a class="reference internal" href="#foo.Name" ' + 'title="foo.Name"><em>foo.Name</em></a>) – blah blah</p>' in content) + assert '<p><strong>age</strong> (<em>foo.Age</em>) – blah blah</p>' in content + + +@pytest.mark.sphinx('dummy', testroot='domain-py-xref-warning') +def test_warn_missing_reference(app, status, warning): + app.build() + assert "index.rst:6: WARNING: undefined label: 'no-label'" in warning.getvalue() + assert ("index.rst:6: WARNING: Failed to create a cross reference. " + "A title or caption not found: 'existing-label'") in warning.getvalue() + + +@pytest.mark.sphinx(confoverrides={'nitpicky': True}) +@pytest.mark.parametrize('include_options', [True, False]) +def test_signature_line_number(app, include_options): + text = (".. 
py:function:: foo(bar : string)\n" + + (" :no-index-entry:\n" if include_options else "")) + doc = restructuredtext.parse(app, text) + xrefs = list(doc.findall(condition=addnodes.pending_xref)) + assert len(xrefs) == 1 + source, line = docutils.utils.get_source_line(xrefs[0]) + assert 'index.rst' in source + assert line == 1 + + +@pytest.mark.sphinx('html', confoverrides={ + 'python_maximum_signature_line_length': len("hello(name: str) -> str"), +}) +def test_pyfunction_signature_with_python_maximum_signature_line_length_equal(app): + text = ".. py:function:: hello(name: str) -> str" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, "hello"], + desc_parameterlist, + [desc_returns, pending_xref, "str"], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="py", objtype="function", no_index=False) + assert_node(doctree[1][0][1], [desc_parameterlist, desc_parameter, ( + [desc_sig_name, "name"], + [desc_sig_punctuation, ":"], + desc_sig_space, + [nodes.inline, pending_xref, "str"], + )]) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'python_maximum_signature_line_length': len("hello(name: str) -> str"), +}) +def test_pyfunction_signature_with_python_maximum_signature_line_length_force_single(app): + text = (".. py:function:: hello(names: str) -> str\n" + " :single-line-parameter-list:") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, "hello"], + desc_parameterlist, + [desc_returns, pending_xref, "str"], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="py", objtype="function", no_index=False) + assert_node(doctree[1][0][1], [desc_parameterlist, desc_parameter, ( + [desc_sig_name, "names"], + [desc_sig_punctuation, ":"], + desc_sig_space, + [nodes.inline, pending_xref, "str"], + )]) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'python_maximum_signature_line_length': len("hello(name: str) -> str"), +}) +def test_pyfunction_signature_with_python_maximum_signature_line_length_break(app): + text = ".. py:function:: hello(names: str) -> str" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, "hello"], + desc_parameterlist, + [desc_returns, pending_xref, "str"], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="py", objtype="function", no_index=False) + assert_node(doctree[1][0][1], [desc_parameterlist, desc_parameter, ( + [desc_sig_name, "names"], + [desc_sig_punctuation, ":"], + desc_sig_space, + [nodes.inline, pending_xref, "str"], + )]) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=True) + + +@pytest.mark.sphinx('html', confoverrides={ + 'maximum_signature_line_length': len("hello(name: str) -> str"), +}) +def test_pyfunction_signature_with_maximum_signature_line_length_equal(app): + text = ".. 
py:function:: hello(name: str) -> str" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, "hello"], + desc_parameterlist, + [desc_returns, pending_xref, "str"], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="py", objtype="function", no_index=False) + assert_node(doctree[1][0][1], [desc_parameterlist, desc_parameter, ( + [desc_sig_name, "name"], + [desc_sig_punctuation, ":"], + desc_sig_space, + [nodes.inline, pending_xref, "str"], + )]) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'maximum_signature_line_length': len("hello(name: str) -> str"), +}) +def test_pyfunction_signature_with_maximum_signature_line_length_force_single(app): + text = (".. py:function:: hello(names: str) -> str\n" + " :single-line-parameter-list:") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, "hello"], + desc_parameterlist, + [desc_returns, pending_xref, "str"], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="py", objtype="function", no_index=False) + assert_node(doctree[1][0][1], [desc_parameterlist, desc_parameter, ( + [desc_sig_name, "names"], + [desc_sig_punctuation, ":"], + desc_sig_space, + [nodes.inline, pending_xref, "str"], + )]) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx('html', confoverrides={ + 'maximum_signature_line_length': len("hello(name: str) -> str"), +}) +def test_pyfunction_signature_with_maximum_signature_line_length_break(app): + text = ".. py:function:: hello(names: str) -> str" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, "hello"], + desc_parameterlist, + [desc_returns, pending_xref, "str"], + )], + desc_content, + )], + )) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="py", objtype="function", no_index=False) + assert_node(doctree[1][0][1], [desc_parameterlist, desc_parameter, ( + [desc_sig_name, "names"], + [desc_sig_punctuation, ":"], + desc_sig_space, + [nodes.inline, pending_xref, "str"], + )]) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=True) + + +@pytest.mark.sphinx( + 'html', + confoverrides={ + 'python_maximum_signature_line_length': len("hello(name: str) -> str"), + 'maximum_signature_line_length': 1, + }, +) +def test_python_maximum_signature_line_length_overrides_global(app): + text = ".. 
py:function:: hello(name: str) -> str" + doctree = restructuredtext.parse(app, text) + expected_doctree = (addnodes.index, + [desc, ([desc_signature, ([desc_name, "hello"], + desc_parameterlist, + [desc_returns, pending_xref, "str"])], + desc_content)]) + assert_node(doctree, expected_doctree) + assert_node(doctree[1], addnodes.desc, desctype="function", + domain="py", objtype="function", no_index=False) + signame_node = [desc_sig_name, "name"] + expected_sig = [desc_parameterlist, desc_parameter, (signame_node, + [desc_sig_punctuation, ":"], + desc_sig_space, + [nodes.inline, pending_xref, "str"])] + assert_node(doctree[1][0][1], expected_sig) + assert_node(doctree[1][0][1], desc_parameterlist, multi_line_parameter_list=False) + + +@pytest.mark.sphinx( + 'html', testroot='domain-py-python_maximum_signature_line_length', +) +def test_domain_py_python_maximum_signature_line_length_in_html(app, status, warning): + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + expected_parameter_list_hello = """\ + +<dl> +<dd>\ +<em class="sig-param">\ +<span class="n"><span class="pre">name</span></span>\ +<span class="p"><span class="pre">:</span></span>\ +<span class="w"> </span>\ +<span class="n"><span class="pre">str</span></span>\ +</em>,\ +</dd> +</dl> + +<span class="sig-paren">)</span> \ +<span class="sig-return">\ +<span class="sig-return-icon">→</span> \ +<span class="sig-return-typehint"><span class="pre">str</span></span>\ +</span>\ +<a class="headerlink" href="#hello" title="Link to this definition">¶</a>\ +</dt>\ +""" + assert expected_parameter_list_hello in content + + param_line_fmt = '<dd>{}</dd>\n' + param_name_fmt = ( + '<em class="sig-param"><span class="n"><span class="pre">{}</span></span></em>' + ) + optional_fmt = '<span class="optional">{}</span>' + + expected_a = param_line_fmt.format( + optional_fmt.format("[") + param_name_fmt.format("a") + "," + optional_fmt.format("["), + ) + assert expected_a in content + + expected_b = param_line_fmt.format( + param_name_fmt.format("b") + "," + optional_fmt.format("]") + optional_fmt.format("]"), + ) + assert expected_b in content + + expected_c = param_line_fmt.format(param_name_fmt.format("c") + ",") + assert expected_c in content + + expected_d = param_line_fmt.format(param_name_fmt.format("d") + optional_fmt.format("[") + ",") + assert expected_d in content + + expected_e = param_line_fmt.format(param_name_fmt.format("e") + ",") + assert expected_e in content + + expected_f = param_line_fmt.format(param_name_fmt.format("f") + "," + optional_fmt.format("]")) + assert expected_f in content + + expected_parameter_list_foo = """\ + +<dl> +{}{}{}{}{}{}</dl> + +<span class="sig-paren">)</span>\ +<a class="headerlink" href="#foo" title="Link to this definition">¶</a>\ +</dt>\ +""".format(expected_a, expected_b, expected_c, expected_d, expected_e, expected_f) + assert expected_parameter_list_foo in content + + +@pytest.mark.sphinx( + 'text', testroot='domain-py-python_maximum_signature_line_length', +) +def test_domain_py_python_maximum_signature_line_length_in_text(app, status, warning): + app.build() + content = (app.outdir / 'index.txt').read_text(encoding='utf8') + param_line_fmt = STDINDENT * " " + "{}\n" + + expected_parameter_list_hello = "(\n{}) -> str".format(param_line_fmt.format("name: str,")) + + assert expected_parameter_list_hello in content + + expected_a = param_line_fmt.format("[a,[") + assert expected_a in content + + expected_b = param_line_fmt.format("b,]]") + assert expected_b in content + + 
expected_c = param_line_fmt.format("c,") + assert expected_c in content + + expected_d = param_line_fmt.format("d[,") + assert expected_d in content + + expected_e = param_line_fmt.format("e,") + assert expected_e in content + + expected_f = param_line_fmt.format("f,]") + assert expected_f in content + + expected_parameter_list_foo = "(\n{}{}{}{}{}{})".format( + expected_a, expected_b, expected_c, expected_d, expected_e, expected_f, + ) + assert expected_parameter_list_foo in content + + +def test_module_content_line_number(app): + text = (".. py:module:: foo\n" + + "\n" + + " Some link here: :ref:`abc`\n") + doc = restructuredtext.parse(app, text) + xrefs = list(doc.findall(condition=addnodes.pending_xref)) + assert len(xrefs) == 1 + source, line = docutils.utils.get_source_line(xrefs[0]) + assert 'index.rst' in source + assert line == 3 + + +@pytest.mark.sphinx(freshenv=True, confoverrides={'python_display_short_literal_types': True}) +def test_short_literal_types(app): + text = """\ +.. py:function:: literal_ints(x: Literal[1, 2, 3] = 1) -> None +.. py:function:: literal_union(x: Union[Literal["a"], Literal["b"], Literal["c"]]) -> None +""" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, 'literal_ints'], + [desc_parameterlist, ( + [desc_parameter, ( + [desc_sig_name, 'x'], + [desc_sig_punctuation, ':'], + desc_sig_space, + [desc_sig_name, ( + [desc_sig_literal_number, '1'], + desc_sig_space, + [desc_sig_punctuation, '|'], + desc_sig_space, + [desc_sig_literal_number, '2'], + desc_sig_space, + [desc_sig_punctuation, '|'], + desc_sig_space, + [desc_sig_literal_number, '3'], + )], + desc_sig_space, + [desc_sig_operator, '='], + desc_sig_space, + [nodes.inline, '1'], + )], + )], + [desc_returns, pending_xref, 'None'], + )], + [desc_content, ()], + )], + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, 'literal_union'], + [desc_parameterlist, ( + [desc_parameter, ( + [desc_sig_name, 'x'], + [desc_sig_punctuation, ':'], + desc_sig_space, + [desc_sig_name, ( + [desc_sig_literal_string, "'a'"], + desc_sig_space, + [desc_sig_punctuation, '|'], + desc_sig_space, + [desc_sig_literal_string, "'b'"], + desc_sig_space, + [desc_sig_punctuation, '|'], + desc_sig_space, + [desc_sig_literal_string, "'c'"], + )], + )], + )], + [desc_returns, pending_xref, 'None'], + )], + [desc_content, ()], + )], + )) + + +def test_function_pep_695(app): + text = """.. 
py:function:: func[\ + S,\ + T: int,\ + U: (int, str),\ + R: int | int,\ + A: int | Annotated[int, ctype("char")],\ + *V,\ + **P\ + ] + """ + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_name, 'func'], + [desc_type_parameter_list, ( + [desc_type_parameter, ([desc_sig_name, 'S'])], + [desc_type_parameter, ( + [desc_sig_name, 'T'], + [desc_sig_punctuation, ':'], + desc_sig_space, + [desc_sig_name, ([pending_xref, 'int'])], + )], + [desc_type_parameter, ( + [desc_sig_name, 'U'], + [desc_sig_punctuation, ':'], + desc_sig_space, + [desc_sig_punctuation, '('], + [desc_sig_name, ( + [pending_xref, 'int'], + [desc_sig_punctuation, ','], + desc_sig_space, + [pending_xref, 'str'], + )], + [desc_sig_punctuation, ')'], + )], + [desc_type_parameter, ( + [desc_sig_name, 'R'], + [desc_sig_punctuation, ':'], + desc_sig_space, + [desc_sig_name, ( + [pending_xref, 'int'], + desc_sig_space, + [desc_sig_punctuation, '|'], + desc_sig_space, + [pending_xref, 'int'], + )], + )], + [desc_type_parameter, ( + [desc_sig_name, 'A'], + [desc_sig_punctuation, ':'], + desc_sig_space, + [desc_sig_name, ([pending_xref, 'int | Annotated[int, ctype("char")]'])], + )], + [desc_type_parameter, ( + [desc_sig_operator, '*'], + [desc_sig_name, 'V'], + )], + [desc_type_parameter, ( + [desc_sig_operator, '**'], + [desc_sig_name, 'P'], + )], + )], + [desc_parameterlist, ()], + )], + [desc_content, ()], + )], + )) + + +def test_class_def_pep_695(app): + # Non-concrete unbound generics are allowed at runtime but type checkers + # should fail (https://peps.python.org/pep-0695/#type-parameter-scopes) + text = """.. py:class:: Class[S: Sequence[T], T, KT, VT](Dict[KT, VT])""" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_annotation, ('class', desc_sig_space)], + [desc_name, 'Class'], + [desc_type_parameter_list, ( + [desc_type_parameter, ( + [desc_sig_name, 'S'], + [desc_sig_punctuation, ':'], + desc_sig_space, + [desc_sig_name, ( + [pending_xref, 'Sequence'], + [desc_sig_punctuation, '['], + [pending_xref, 'T'], + [desc_sig_punctuation, ']'], + )], + )], + [desc_type_parameter, ([desc_sig_name, 'T'])], + [desc_type_parameter, ([desc_sig_name, 'KT'])], + [desc_type_parameter, ([desc_sig_name, 'VT'])], + )], + [desc_parameterlist, ([desc_parameter, 'Dict[KT, VT]'])], + )], + [desc_content, ()], + )], + )) + + +def test_class_def_pep_696(app): + # test default values for type variables without using PEP 696 AST parser + text = """.. 
py:class:: Class[\ + T, KT, VT,\ + J: int,\ + K = list,\ + S: str = str,\ + L: (T, tuple[T, ...], collections.abc.Iterable[T]) = set[T],\ + Q: collections.abc.Mapping[KT, VT] = dict[KT, VT],\ + *V = *tuple[*Ts, bool],\ + **P = [int, Annotated[int, ValueRange(3, 10), ctype("char")]]\ + ](Other[T, KT, VT, J, S, L, Q, *V, **P]) + """ + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + addnodes.index, + [desc, ( + [desc_signature, ( + [desc_annotation, ('class', desc_sig_space)], + [desc_name, 'Class'], + [desc_type_parameter_list, ( + [desc_type_parameter, ([desc_sig_name, 'T'])], + [desc_type_parameter, ([desc_sig_name, 'KT'])], + [desc_type_parameter, ([desc_sig_name, 'VT'])], + # J: int + [desc_type_parameter, ( + [desc_sig_name, 'J'], + [desc_sig_punctuation, ':'], + desc_sig_space, + [desc_sig_name, ([pending_xref, 'int'])], + )], + # K = list + [desc_type_parameter, ( + [desc_sig_name, 'K'], + desc_sig_space, + [desc_sig_operator, '='], + desc_sig_space, + [nodes.inline, 'list'], + )], + # S: str = str + [desc_type_parameter, ( + [desc_sig_name, 'S'], + [desc_sig_punctuation, ':'], + desc_sig_space, + [desc_sig_name, ([pending_xref, 'str'])], + desc_sig_space, + [desc_sig_operator, '='], + desc_sig_space, + [nodes.inline, 'str'], + )], + [desc_type_parameter, ( + [desc_sig_name, 'L'], + [desc_sig_punctuation, ':'], + desc_sig_space, + [desc_sig_punctuation, '('], + [desc_sig_name, ( + # T + [pending_xref, 'T'], + [desc_sig_punctuation, ','], + desc_sig_space, + # tuple[T, ...] + [pending_xref, 'tuple'], + [desc_sig_punctuation, '['], + [pending_xref, 'T'], + [desc_sig_punctuation, ','], + desc_sig_space, + [desc_sig_punctuation, '...'], + [desc_sig_punctuation, ']'], + [desc_sig_punctuation, ','], + desc_sig_space, + # collections.abc.Iterable[T] + [pending_xref, 'collections.abc.Iterable'], + [desc_sig_punctuation, '['], + [pending_xref, 'T'], + [desc_sig_punctuation, ']'], + )], + [desc_sig_punctuation, ')'], + desc_sig_space, + [desc_sig_operator, '='], + desc_sig_space, + [nodes.inline, 'set[T]'], + )], + [desc_type_parameter, ( + [desc_sig_name, 'Q'], + [desc_sig_punctuation, ':'], + desc_sig_space, + [desc_sig_name, ( + [pending_xref, 'collections.abc.Mapping'], + [desc_sig_punctuation, '['], + [pending_xref, 'KT'], + [desc_sig_punctuation, ','], + desc_sig_space, + [pending_xref, 'VT'], + [desc_sig_punctuation, ']'], + )], + desc_sig_space, + [desc_sig_operator, '='], + desc_sig_space, + [nodes.inline, 'dict[KT, VT]'], + )], + [desc_type_parameter, ( + [desc_sig_operator, '*'], + [desc_sig_name, 'V'], + desc_sig_space, + [desc_sig_operator, '='], + desc_sig_space, + [nodes.inline, '*tuple[*Ts, bool]'], + )], + [desc_type_parameter, ( + [desc_sig_operator, '**'], + [desc_sig_name, 'P'], + desc_sig_space, + [desc_sig_operator, '='], + desc_sig_space, + [nodes.inline, '[int, Annotated[int, ValueRange(3, 10), ctype("char")]]'], + )], + )], + [desc_parameterlist, ( + [desc_parameter, 'Other[T, KT, VT, J, S, L, Q, *V, **P]'], + )], + )], + [desc_content, ()], + )], + )) + + +@pytest.mark.parametrize(('tp_list', 'tptext'), [ + ('[T:int]', '[T: int]'), + ('[T:*Ts]', '[T: *Ts]'), + ('[T:int|(*Ts)]', '[T: int | (*Ts)]'), + ('[T:(*Ts)|int]', '[T: (*Ts) | int]'), + ('[T:(int|(*Ts))]', '[T: (int | (*Ts))]'), + ('[T:((*Ts)|int)]', '[T: ((*Ts) | int)]'), + ('[T:Annotated[int,ctype("char")]]', '[T: Annotated[int, ctype("char")]]'), +]) +def test_pep_695_and_pep_696_whitespaces_in_bound(app, tp_list, tptext): + text = f'.. 
py:function:: f{tp_list}()' + doctree = restructuredtext.parse(app, text) + assert doctree.astext() == f'\n\nf{tptext}()\n\n' + + +@pytest.mark.parametrize(('tp_list', 'tptext'), [ + ('[T:(int,str)]', '[T: (int, str)]'), + ('[T:(int|str,*Ts)]', '[T: (int | str, *Ts)]'), +]) +def test_pep_695_and_pep_696_whitespaces_in_constraints(app, tp_list, tptext): + text = f'.. py:function:: f{tp_list}()' + doctree = restructuredtext.parse(app, text) + assert doctree.astext() == f'\n\nf{tptext}()\n\n' + + +@pytest.mark.parametrize(('tp_list', 'tptext'), [ + ('[T=int]', '[T = int]'), + ('[T:int=int]', '[T: int = int]'), + ('[*V=*Ts]', '[*V = *Ts]'), + ('[*V=(*Ts)]', '[*V = (*Ts)]'), + ('[*V=*tuple[str,...]]', '[*V = *tuple[str, ...]]'), + ('[*V=*tuple[*Ts,...]]', '[*V = *tuple[*Ts, ...]]'), + ('[*V=*tuple[int,*Ts]]', '[*V = *tuple[int, *Ts]]'), + ('[*V=*tuple[*Ts,int]]', '[*V = *tuple[*Ts, int]]'), + ('[**P=[int,*Ts]]', '[**P = [int, *Ts]]'), + ('[**P=[int, int*3]]', '[**P = [int, int * 3]]'), + ('[**P=[int, *Ts*3]]', '[**P = [int, *Ts * 3]]'), + ('[**P=[int,A[int,ctype("char")]]]', '[**P = [int, A[int, ctype("char")]]]'), +]) +def test_pep_695_and_pep_696_whitespaces_in_default(app, tp_list, tptext): + text = f'.. py:function:: f{tp_list}()' + doctree = restructuredtext.parse(app, text) + assert doctree.astext() == f'\n\nf{tptext}()\n\n' diff --git a/tests/test_domain_rst.py b/tests/test_domain_rst.py new file mode 100644 index 0000000..4445da1 --- /dev/null +++ b/tests/test_domain_rst.py @@ -0,0 +1,137 @@ +"""Tests the reStructuredText domain.""" + +from sphinx import addnodes +from sphinx.addnodes import ( + desc, + desc_addname, + desc_annotation, + desc_content, + desc_name, + desc_signature, +) +from sphinx.domains.rst import parse_directive +from sphinx.testing import restructuredtext +from sphinx.testing.util import assert_node + + +def test_parse_directive(): + s = parse_directive(' foö ') + assert s == ('foö', '') + + s = parse_directive(' .. foö :: ') + assert s == ('foö', '') + + s = parse_directive('.. foö:: args1 args2') + assert s == ('foö', ' args1 args2') + + s = parse_directive('.. :: bar') + assert s == ('.. :: bar', '') + + +def test_rst_directive(app): + # bare + text = ".. rst:directive:: toctree" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, desc_name, ".. toctree::"], + [desc_content, ()])])) + assert_node(doctree[0], + entries=[("single", "toctree (directive)", "directive-toctree", "", None)]) + assert_node(doctree[1], addnodes.desc, desctype="directive", + domain="rst", objtype="directive", no_index=False) + + # decorated + text = ".. rst:directive:: .. toctree::" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, desc_name, ".. toctree::"], + [desc_content, ()])])) + assert_node(doctree[0], + entries=[("single", "toctree (directive)", "directive-toctree", "", None)]) + assert_node(doctree[1], addnodes.desc, desctype="directive", + domain="rst", objtype="directive", no_index=False) + + +def test_rst_directive_with_argument(app): + text = ".. rst:directive:: .. toctree:: foo bar baz" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, ".. 
toctree::"], + [desc_addname, " foo bar baz"])], + [desc_content, ()])])) + assert_node(doctree[0], + entries=[("single", "toctree (directive)", "directive-toctree", "", None)]) + assert_node(doctree[1], addnodes.desc, desctype="directive", + domain="rst", objtype="directive", no_index=False) + + +def test_rst_directive_option(app): + text = ".. rst:directive:option:: foo" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, desc_name, ":foo:"], + [desc_content, ()])])) + assert_node(doctree[0], + entries=[("single", ":foo: (directive option)", + "directive-option-foo", "", "F")]) + assert_node(doctree[1], addnodes.desc, desctype="directive:option", + domain="rst", objtype="directive:option", no_index=False) + + +def test_rst_directive_option_with_argument(app): + text = ".. rst:directive:option:: foo: bar baz" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, ":foo:"], + [desc_annotation, " bar baz"])], + [desc_content, ()])])) + assert_node(doctree[0], + entries=[("single", ":foo: (directive option)", + "directive-option-foo", "", "F")]) + assert_node(doctree[1], addnodes.desc, desctype="directive:option", + domain="rst", objtype="directive:option", no_index=False) + + +def test_rst_directive_option_type(app): + text = (".. rst:directive:option:: foo\n" + " :type: directives.flags\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, ":foo:"], + [desc_annotation, " (directives.flags)"])], + [desc_content, ()])])) + assert_node(doctree[0], + entries=[("single", ":foo: (directive option)", + "directive-option-foo", "", "F")]) + assert_node(doctree[1], addnodes.desc, desctype="directive:option", + domain="rst", objtype="directive:option", no_index=False) + + +def test_rst_directive_and_directive_option(app): + text = (".. rst:directive:: foo\n" + "\n" + " .. rst:directive:option:: bar\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, desc_name, ".. foo::"], + [desc_content, (addnodes.index, + desc)])])) + assert_node(doctree[1][1][0], + entries=[("pair", "foo (directive); :bar: (directive option)", + "directive-option-foo-bar", "", "B")]) + assert_node(doctree[1][1][1], ([desc_signature, desc_name, ":bar:"], + [desc_content, ()])) + assert_node(doctree[1][1][1], addnodes.desc, desctype="directive:option", + domain="rst", objtype="directive:option", no_index=False) + + +def test_rst_role(app): + text = ".. 
rst:role:: ref" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, desc_name, ":ref:"], + [desc_content, ()])])) + assert_node(doctree[0], + entries=[("single", "ref (role)", "role-ref", "", None)]) + assert_node(doctree[1], addnodes.desc, desctype="role", + domain="rst", objtype="role", no_index=False) diff --git a/tests/test_domain_std.py b/tests/test_domain_std.py new file mode 100644 index 0000000..6d7ab53 --- /dev/null +++ b/tests/test_domain_std.py @@ -0,0 +1,495 @@ +"""Tests the std domain""" + +from unittest import mock + +import pytest +from docutils import nodes +from docutils.nodes import definition, definition_list, definition_list_item, term +from html5lib import HTMLParser + +from sphinx import addnodes +from sphinx.addnodes import ( + desc, + desc_addname, + desc_content, + desc_name, + desc_signature, + glossary, + index, + pending_xref, +) +from sphinx.domains.std import StandardDomain +from sphinx.testing import restructuredtext +from sphinx.testing.util import assert_node + + +def test_process_doc_handle_figure_caption(): + env = mock.Mock(domaindata={}) + env.app.registry.enumerable_nodes = {} + figure_node = nodes.figure( + '', + nodes.caption('caption text', 'caption text'), + ) + document = mock.Mock( + nametypes={'testname': True}, + nameids={'testname': 'testid'}, + ids={'testid': figure_node}, + citation_refs={}, + ) + document.findall.return_value = [] + + domain = StandardDomain(env) + if 'testname' in domain.data['labels']: + del domain.data['labels']['testname'] + domain.process_doc(env, 'testdoc', document) + assert 'testname' in domain.data['labels'] + assert domain.data['labels']['testname'] == ( + 'testdoc', 'testid', 'caption text') + + +def test_process_doc_handle_table_title(): + env = mock.Mock(domaindata={}) + env.app.registry.enumerable_nodes = {} + table_node = nodes.table( + '', + nodes.title('title text', 'title text'), + ) + document = mock.Mock( + nametypes={'testname': True}, + nameids={'testname': 'testid'}, + ids={'testid': table_node}, + citation_refs={}, + ) + document.findall.return_value = [] + + domain = StandardDomain(env) + if 'testname' in domain.data['labels']: + del domain.data['labels']['testname'] + domain.process_doc(env, 'testdoc', document) + assert 'testname' in domain.data['labels'] + assert domain.data['labels']['testname'] == ( + 'testdoc', 'testid', 'title text') + + +def test_get_full_qualified_name(): + env = mock.Mock(domaindata={}) + env.app.registry.enumerable_nodes = {} + domain = StandardDomain(env) + + # normal references + node = nodes.reference() + assert domain.get_full_qualified_name(node) is None + + # simple reference to options + node = nodes.reference(reftype='option', reftarget='-l') + assert domain.get_full_qualified_name(node) is None + + # options with std:program context + kwargs = {'std:program': 'ls'} + node = nodes.reference(reftype='option', reftarget='-l', **kwargs) + assert domain.get_full_qualified_name(node) == 'ls.-l' + + +def test_cmd_option_with_optional_value(app): + text = ".. 
option:: -j[=N]" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (index, + [desc, ([desc_signature, ([desc_name, '-j'], + [desc_addname, '[=N]'])], + [desc_content, ()])])) + assert_node(doctree[0], addnodes.index, + entries=[('pair', 'command line option; -j', 'cmdoption-j', '', None)]) + + objects = list(app.env.get_domain("std").get_objects()) + assert ('-j', '-j', 'cmdoption', 'index', 'cmdoption-j', 1) in objects + + +def test_cmd_option_starting_with_bracket(app): + text = ".. option:: [enable=]PATTERN" + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (index, + [desc, ([desc_signature, ([desc_name, '[enable'], + [desc_addname, '=]PATTERN'])], + [desc_content, ()])])) + objects = list(app.env.get_domain("std").get_objects()) + assert ('[enable', '[enable', 'cmdoption', 'index', 'cmdoption-arg-enable', 1) in objects + + +def test_glossary(app): + text = (".. glossary::\n" + "\n" + " term1\n" + " TERM2\n" + " description\n" + "\n" + " term3 : classifier\n" + " description\n" + " description\n" + "\n" + " term4 : class1 : class2\n" + " description\n") + + # doctree + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + [glossary, definition_list, ([definition_list_item, ([term, ("term1", + index)], + [term, ("TERM2", + index)], + definition)], + [definition_list_item, ([term, ("term3", + index)], + definition)], + [definition_list_item, ([term, ("term4", + index)], + definition)])], + )) + assert_node(doctree[0][0][0][0][1], + entries=[("single", "term1", "term-term1", "main", None)]) + assert_node(doctree[0][0][0][1][1], + entries=[("single", "TERM2", "term-TERM2", "main", None)]) + assert_node(doctree[0][0][0][2], + [definition, nodes.paragraph, "description"]) + assert_node(doctree[0][0][1][0][1], + entries=[("single", "term3", "term-term3", "main", "classifier")]) + assert_node(doctree[0][0][1][1], + [definition, nodes.paragraph, ("description\n" + "description")]) + assert_node(doctree[0][0][2][0][1], + entries=[("single", "term4", "term-term4", "main", "class1")]) + assert_node(doctree[0][0][2][1], + [nodes.definition, nodes.paragraph, "description"]) + + # index + domain = app.env.get_domain("std") + objects = list(domain.get_objects()) + assert ("term1", "term1", "term", "index", "term-term1", -1) in objects + assert ("TERM2", "TERM2", "term", "index", "term-TERM2", -1) in objects + assert ("term3", "term3", "term", "index", "term-term3", -1) in objects + assert ("term4", "term4", "term", "index", "term-term4", -1) in objects + + # term reference (case sensitive) + refnode = domain.resolve_xref(app.env, 'index', app.builder, 'term', 'term1', + pending_xref(), nodes.paragraph()) + assert_node(refnode, nodes.reference, refid="term-term1") + + # term reference (case insensitive) + refnode = domain.resolve_xref(app.env, 'index', app.builder, 'term', 'term2', + pending_xref(), nodes.paragraph()) + assert_node(refnode, nodes.reference, refid="term-TERM2") + + +def test_glossary_warning(app, status, warning): + # empty line between terms + text = (".. glossary::\n" + "\n" + " term1\n" + "\n" + " term2\n") + restructuredtext.parse(app, text, "case1") + assert ("case1.rst:4: WARNING: glossary terms must not be separated by empty lines" + in warning.getvalue()) + + # glossary starts with indented item + text = (".. 
glossary::\n" + "\n" + " description\n" + " term\n") + restructuredtext.parse(app, text, "case2") + assert ("case2.rst:3: WARNING: glossary term must be preceded by empty line" + in warning.getvalue()) + + # empty line between terms + text = (".. glossary::\n" + "\n" + " term1\n" + " description\n" + " term2\n") + restructuredtext.parse(app, text, "case3") + assert ("case3.rst:4: WARNING: glossary term must be preceded by empty line" + in warning.getvalue()) + + # duplicated terms + text = (".. glossary::\n" + "\n" + " term-case4\n" + " term-case4\n") + restructuredtext.parse(app, text, "case4") + assert ("case4.rst:3: WARNING: duplicate term description of term-case4, " + "other instance in case4" in warning.getvalue()) + + +def test_glossary_comment(app): + text = (".. glossary::\n" + "\n" + " term1\n" + " description\n" + " .. term2\n" + " description\n" + " description\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + [glossary, definition_list, definition_list_item, ([term, ("term1", + index)], + definition)], + )) + assert_node(doctree[0][0][0][1], + [nodes.definition, nodes.paragraph, "description"]) + + +def test_glossary_comment2(app): + text = (".. glossary::\n" + "\n" + " term1\n" + " description\n" + "\n" + " .. term2\n" + " term3\n" + " description\n" + " description\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + [glossary, definition_list, ([definition_list_item, ([term, ("term1", + index)], + definition)], + [definition_list_item, ([term, ("term3", + index)], + definition)])], + )) + assert_node(doctree[0][0][0][1], + [nodes.definition, nodes.paragraph, "description"]) + assert_node(doctree[0][0][1][1], + [nodes.definition, nodes.paragraph, ("description\n" + "description")]) + + +def test_glossary_sorted(app): + text = (".. glossary::\n" + " :sorted:\n" + "\n" + " term3\n" + " description\n" + "\n" + " term2\n" + " term1\n" + " description\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ( + [glossary, definition_list, ([definition_list_item, ([term, ("term2", + index)], + [term, ("term1", + index)], + definition)], + [definition_list_item, ([term, ("term3", + index)], + definition)])], + )) + assert_node(doctree[0][0][0][2], + [nodes.definition, nodes.paragraph, "description"]) + assert_node(doctree[0][0][1][1], + [nodes.definition, nodes.paragraph, "description"]) + + +def test_glossary_alphanumeric(app): + text = (".. glossary::\n" + "\n" + " 1\n" + " /\n") + restructuredtext.parse(app, text) + objects = list(app.env.get_domain("std").get_objects()) + assert ("1", "1", "term", "index", "term-1", -1) in objects + assert ("/", "/", "term", "index", "term-0", -1) in objects + + +def test_glossary_conflicted_labels(app): + text = (".. _term-foo:\n" + ".. glossary::\n" + "\n" + " foo\n") + restructuredtext.parse(app, text) + objects = list(app.env.get_domain("std").get_objects()) + assert ("foo", "foo", "term", "index", "term-0", -1) in objects + + +def test_cmdoption(app): + text = (".. program:: ls\n" + "\n" + ".. 
option:: -l\n") + domain = app.env.get_domain('std') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, "-l"], + [desc_addname, ()])], + [desc_content, ()])])) + assert_node(doctree[0], addnodes.index, + entries=[('pair', 'ls command line option; -l', 'cmdoption-ls-l', '', None)]) + assert ('ls', '-l') in domain.progoptions + assert domain.progoptions[('ls', '-l')] == ('index', 'cmdoption-ls-l') + + +def test_cmdoption_for_None(app): + text = (".. program:: ls\n" + ".. program:: None\n" + "\n" + ".. option:: -l\n") + domain = app.env.get_domain('std') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, "-l"], + [desc_addname, ()])], + [desc_content, ()])])) + assert_node(doctree[0], addnodes.index, + entries=[('pair', 'command line option; -l', 'cmdoption-l', '', None)]) + assert (None, '-l') in domain.progoptions + assert domain.progoptions[(None, '-l')] == ('index', 'cmdoption-l') + + +def test_multiple_cmdoptions(app): + text = (".. program:: cmd\n" + "\n" + ".. option:: -o directory, --output directory\n") + domain = app.env.get_domain('std') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + [desc, ([desc_signature, ([desc_name, "-o"], + [desc_addname, " directory"], + [desc_addname, ", "], + [desc_name, "--output"], + [desc_addname, " directory"])], + [desc_content, ()])])) + assert_node(doctree[0], addnodes.index, + entries=[('pair', 'cmd command line option; -o', 'cmdoption-cmd-o', '', None), + ('pair', 'cmd command line option; --output', 'cmdoption-cmd-o', '', None)]) + assert ('cmd', '-o') in domain.progoptions + assert ('cmd', '--output') in domain.progoptions + assert domain.progoptions[('cmd', '-o')] == ('index', 'cmdoption-cmd-o') + assert domain.progoptions[('cmd', '--output')] == ('index', 'cmdoption-cmd-o') + + +@pytest.mark.sphinx(testroot='productionlist') +def test_productionlist(app, status, warning): + app.builder.build_all() + + warnings = warning.getvalue().split("\n") + assert len(warnings) == 2 + assert warnings[-1] == '' + assert "Dup2.rst:4: WARNING: duplicate token description of Dup, other instance in Dup1" in warnings[0] + + with (app.outdir / 'index.html').open('rb') as f: + etree = HTMLParser(namespaceHTMLElements=False).parse(f) + ul = list(etree.iter('ul'))[1] + cases = [] + for li in list(ul): + assert len(list(li)) == 1 + p = list(li)[0] + assert p.tag == 'p' + text = str(p.text).strip(' :') + assert len(list(p)) == 1 + a = list(p)[0] + assert a.tag == 'a' + link = a.get('href') + assert len(list(a)) == 1 + code = list(a)[0] + assert code.tag == 'code' + assert len(list(code)) == 1 + span = list(code)[0] + assert span.tag == 'span' + linkText = span.text.strip() + cases.append((text, link, linkText)) + assert cases == [ + ('A', 'Bare.html#grammar-token-A', 'A'), + ('B', 'Bare.html#grammar-token-B', 'B'), + ('P1:A', 'P1.html#grammar-token-P1-A', 'P1:A'), + ('P1:B', 'P1.html#grammar-token-P1-B', 'P1:B'), + ('P2:A', 'P1.html#grammar-token-P1-A', 'P1:A'), + ('P2:B', 'P2.html#grammar-token-P2-B', 'P2:B'), + ('Explicit title A, plain', 'Bare.html#grammar-token-A', 'MyTitle'), + ('Explicit title A, colon', 'Bare.html#grammar-token-A', 'My:Title'), + ('Explicit title P1:A, plain', 'P1.html#grammar-token-P1-A', 'MyTitle'), + ('Explicit title P1:A, colon', 'P1.html#grammar-token-P1-A', 'My:Title'), + ('Tilde A', 'Bare.html#grammar-token-A', 'A'), + ('Tilde P1:A', 
'P1.html#grammar-token-P1-A', 'A'), + ('Tilde explicit title P1:A', 'P1.html#grammar-token-P1-A', '~MyTitle'), + ('Tilde, explicit title P1:A', 'P1.html#grammar-token-P1-A', 'MyTitle'), + ('Dup', 'Dup2.html#grammar-token-Dup', 'Dup'), + ('FirstLine', 'firstLineRule.html#grammar-token-FirstLine', 'FirstLine'), + ('SecondLine', 'firstLineRule.html#grammar-token-SecondLine', 'SecondLine'), + ] + + text = (app.outdir / 'LineContinuation.html').read_text(encoding='utf8') + assert "A</strong> ::= B C D E F G" in text + + +def test_productionlist2(app): + text = (".. productionlist:: P2\n" + " A: `:A` `A`\n" + " B: `P1:B` `~P1:B`\n") + doctree = restructuredtext.parse(app, text) + refnodes = list(doctree.findall(pending_xref)) + assert_node(refnodes[0], pending_xref, reftarget="A") + assert_node(refnodes[1], pending_xref, reftarget="P2:A") + assert_node(refnodes[2], pending_xref, reftarget="P1:B") + assert_node(refnodes[3], pending_xref, reftarget="P1:B") + assert_node(refnodes[0], [pending_xref, nodes.literal, "A"]) + assert_node(refnodes[1], [pending_xref, nodes.literal, "A"]) + assert_node(refnodes[2], [pending_xref, nodes.literal, "P1:B"]) + assert_node(refnodes[3], [pending_xref, nodes.literal, "B"]) + + +def test_disabled_docref(app): + text = (":doc:`index`\n" + ":doc:`!index`\n") + doctree = restructuredtext.parse(app, text) + assert_node(doctree, ([nodes.paragraph, ([pending_xref, nodes.inline, "index"], + "\n", + [nodes.inline, "index"])],)) + + +def test_labeled_rubric(app): + text = (".. _label:\n" + ".. rubric:: blah *blah* blah\n") + restructuredtext.parse(app, text) + + domain = app.env.get_domain("std") + assert 'label' in domain.labels + assert domain.labels['label'] == ('index', 'label', 'blah blah blah') + + +def test_labeled_definition(app): + text = (".. _label1:\n" + "\n" + "Foo blah *blah* blah\n" + " Definition\n" + "\n" + ".. _label2:\n" + "\n" + "Bar blah *blah* blah\n" + " Definition\n" + "\n") + restructuredtext.parse(app, text) + + domain = app.env.get_domain("std") + assert 'label1' in domain.labels + assert domain.labels['label1'] == ('index', 'label1', 'Foo blah blah blah') + assert 'label2' in domain.labels + assert domain.labels['label2'] == ('index', 'label2', 'Bar blah blah blah') + + +def test_labeled_field(app): + text = (".. _label1:\n" + "\n" + ":Foo blah *blah* blah:\n" + " Definition\n" + "\n" + ".. 
_label2:\n" + "\n" + ":Bar blah *blah* blah:\n" + " Definition\n" + "\n") + restructuredtext.parse(app, text) + + domain = app.env.get_domain("std") + assert 'label1' in domain.labels + assert domain.labels['label1'] == ('index', 'label1', 'Foo blah blah blah') + assert 'label2' in domain.labels + assert domain.labels['label2'] == ('index', 'label2', 'Bar blah blah blah') diff --git a/tests/test_environment.py b/tests/test_environment.py new file mode 100644 index 0000000..8a34457 --- /dev/null +++ b/tests/test_environment.py @@ -0,0 +1,145 @@ +"""Test the BuildEnvironment class.""" +import os +import shutil +from pathlib import Path + +import pytest + +from sphinx.builders.html import StandaloneHTMLBuilder +from sphinx.builders.latex import LaTeXBuilder +from sphinx.environment import CONFIG_CHANGED, CONFIG_EXTENSIONS_CHANGED, CONFIG_NEW, CONFIG_OK + + +@pytest.mark.sphinx('dummy', testroot='basic') +def test_config_status(make_app, app_params): + args, kwargs = app_params + + # clean build + app1 = make_app(*args, freshenv=True, **kwargs) + assert app1.env.config_status == CONFIG_NEW + app1.build() + assert '[new config] 1 added' in app1._status.getvalue() + + # incremental build (no config changed) + app2 = make_app(*args, **kwargs) + assert app2.env.config_status == CONFIG_OK + app2.build() + assert "0 added, 0 changed, 0 removed" in app2._status.getvalue() + + # incremental build (config entry changed) + app3 = make_app(*args, confoverrides={'root_doc': 'indexx'}, **kwargs) + fname = os.path.join(app3.srcdir, 'index.rst') + assert os.path.isfile(fname) + shutil.move(fname, fname[:-4] + 'x.rst') + assert app3.env.config_status == CONFIG_CHANGED + app3.build() + shutil.move(fname[:-4] + 'x.rst', fname) + assert "[config changed ('root_doc')] 1 added" in app3._status.getvalue() + + # incremental build (extension changed) + app4 = make_app(*args, confoverrides={'extensions': ['sphinx.ext.autodoc']}, **kwargs) + assert app4.env.config_status == CONFIG_EXTENSIONS_CHANGED + app4.build() + want_str = "[extensions changed ('sphinx.ext.autodoc')] 1 added" + assert want_str in app4._status.getvalue() + + +@pytest.mark.sphinx('dummy') +def test_images(app): + app.build() + + tree = app.env.get_doctree('images') + htmlbuilder = StandaloneHTMLBuilder(app, app.env) + htmlbuilder.init() + htmlbuilder.imgpath = 'dummy' + htmlbuilder.post_process_images(tree) + assert set(htmlbuilder.images.keys()) == \ + {'subdir/img.png', 'img.png', 'subdir/simg.png', 'svgimg.svg', 'img.foo.png'} + assert set(htmlbuilder.images.values()) == \ + {'img.png', 'img1.png', 'simg.png', 'svgimg.svg', 'img.foo.png'} + + latexbuilder = LaTeXBuilder(app, app.env) + latexbuilder.init() + latexbuilder.post_process_images(tree) + assert set(latexbuilder.images.keys()) == \ + {'subdir/img.png', 'subdir/simg.png', 'img.png', 'img.pdf', + 'svgimg.pdf', 'img.foo.png'} + assert set(latexbuilder.images.values()) == \ + {'img.pdf', 'img.png', 'img1.png', 'simg.png', + 'svgimg.pdf', 'img.foo.png'} + + +@pytest.mark.sphinx('dummy') +def test_object_inventory(app): + app.build() + refs = app.env.domaindata['py']['objects'] + + assert 'func_without_module' in refs + assert refs['func_without_module'] == ('objects', 'func_without_module', 'function', False) + assert 'func_without_module2' in refs + assert 'mod.func_in_module' in refs + assert 'mod.Cls' in refs + assert 'mod.Cls.meth1' in refs + assert 'mod.Cls.meth2' in refs + assert 'mod.Cls.meths' in refs + + assert 'mod.Error' not in refs + assert 'errmod.Error' in refs + + assert 
'func_in_module' not in refs + assert 'func_noindex' not in refs + + assert app.env.domaindata['py']['modules']['mod'] == \ + ('objects', 'module-mod', 'Module synopsis.', 'UNIX', False) + + assert app.env.domains['py'].data is app.env.domaindata['py'] + assert app.env.domains['c'].data is app.env.domaindata['c'] + + +@pytest.mark.sphinx('dummy', testroot='basic') +def test_env_relfn2path(app): + # relative filename and root document + relfn, absfn = app.env.relfn2path('logo.jpg', 'index') + assert relfn == 'logo.jpg' + assert absfn == str(app.srcdir / 'logo.jpg') + + # absolute filename and root document + relfn, absfn = app.env.relfn2path('/logo.jpg', 'index') + assert relfn == 'logo.jpg' + assert absfn == str(app.srcdir / 'logo.jpg') + + # relative filename and a document in subdir + relfn, absfn = app.env.relfn2path('logo.jpg', 'subdir/index') + assert Path(relfn) == Path('subdir/logo.jpg') + assert absfn == str(app.srcdir / 'subdir' / 'logo.jpg') + + # absolute filename and a document in subdir + relfn, absfn = app.env.relfn2path('/logo.jpg', 'subdir/index') + assert relfn == 'logo.jpg' + assert absfn == str(app.srcdir / 'logo.jpg') + + # relative filename having subdir + relfn, absfn = app.env.relfn2path('images/logo.jpg', 'index') + assert relfn == 'images/logo.jpg' + assert absfn == str(app.srcdir / 'images' / 'logo.jpg') + + # relative path traversal + relfn, absfn = app.env.relfn2path('../logo.jpg', 'index') + assert relfn == '../logo.jpg' + assert absfn == str(app.srcdir.parent / 'logo.jpg') + + # relative path traversal + relfn, absfn = app.env.relfn2path('subdir/../logo.jpg', 'index') + assert relfn == 'logo.jpg' + assert absfn == str(app.srcdir / 'logo.jpg') + + # omit docname (w/ current docname) + app.env.temp_data['docname'] = 'subdir/document' + relfn, absfn = app.env.relfn2path('images/logo.jpg') + assert Path(relfn) == Path('subdir/images/logo.jpg') + assert absfn == str(app.srcdir / 'subdir' / 'images' / 'logo.jpg') + + # omit docname (w/o current docname) + app.env.temp_data.clear() + with pytest.raises(KeyError): + app.env.relfn2path('images/logo.jpg') diff --git a/tests/test_environment_indexentries.py b/tests/test_environment_indexentries.py new file mode 100644 index 0000000..4cfdc28 --- /dev/null +++ b/tests/test_environment_indexentries.py @@ -0,0 +1,166 @@ +"""Test the sphinx.environment.adapters.indexentries.""" + +import pytest + +from sphinx.environment.adapters.indexentries import IndexEntries +from sphinx.testing import restructuredtext + + +@pytest.mark.sphinx('dummy', freshenv=True) +def test_create_single_index(app): + text = (".. index:: docutils\n" + ".. index:: Python\n" + ".. index:: pip; install\n" + ".. index:: pip; upgrade\n" + ".. index:: Sphinx\n" + ".. index:: Ель\n" + ".. index:: ёлка\n" + ".. index:: \N{RIGHT-TO-LEFT MARK}\u05e2\u05d1\u05e8\u05d9\u05ea\N{LEFT-TO-RIGHT MARK}\n" + ".. index:: 9-symbol\n" + ".. index:: &-symbol\n" + ".. 
index:: £100\n") + restructuredtext.parse(app, text) + index = IndexEntries(app.env).create_index(app.builder) + assert len(index) == 6 + assert index[0] == ('Symbols', [('&-symbol', [[('', '#index-9')], [], None]), + ('9-symbol', [[('', '#index-8')], [], None]), + ('£100', [[('', '#index-10')], [], None])]) + assert index[1] == ('D', [('docutils', [[('', '#index-0')], [], None])]) + assert index[2] == ('P', [('pip', [[], [('install', [('', '#index-2')]), + ('upgrade', [('', '#index-3')])], None]), + ('Python', [[('', '#index-1')], [], None])]) + assert index[3] == ('S', [('Sphinx', [[('', '#index-4')], [], None])]) + assert index[4] == ('Е', + [('ёлка', [[('', '#index-6')], [], None]), + ('Ель', [[('', '#index-5')], [], None])]) + # Here the word starts with U+200F RIGHT-TO-LEFT MARK, which should be + # ignored when getting the first letter. + assert index[5] == ('\u05e2', [( + '\N{RIGHT-TO-LEFT MARK}\u05e2\u05d1\u05e8\u05d9\u05ea\N{LEFT-TO-RIGHT MARK}', + [[('', '#index-7')], [], None], + )]) + + +@pytest.mark.sphinx('dummy', freshenv=True) +def test_create_pair_index(app): + text = (".. index:: pair: docutils; reStructuredText\n" + ".. index:: pair: Python; interpreter\n" + ".. index:: pair: Sphinx; documentation tool\n" + ".. index:: pair: Sphinx; :+1:\n" + ".. index:: pair: Sphinx; Ель\n" + ".. index:: pair: Sphinx; ёлка\n") + restructuredtext.parse(app, text) + index = IndexEntries(app.env).create_index(app.builder) + assert len(index) == 7 + assert index[0] == ('Symbols', [(':+1:', [[], [('Sphinx', [('', '#index-3')])], None])]) + assert index[1] == ('D', + [('documentation tool', [[], [('Sphinx', [('', '#index-2')])], None]), + ('docutils', [[], [('reStructuredText', [('', '#index-0')])], None])]) + assert index[2] == ('I', [('interpreter', [[], [('Python', [('', '#index-1')])], None])]) + assert index[3] == ('P', [('Python', [[], [('interpreter', [('', '#index-1')])], None])]) + assert index[4] == ('R', + [('reStructuredText', [[], [('docutils', [('', '#index-0')])], None])]) + assert index[5] == ('S', + [('Sphinx', [[], + [(':+1:', [('', '#index-3')]), + ('documentation tool', [('', '#index-2')]), + ('ёлка', [('', '#index-5')]), + ('Ель', [('', '#index-4')])], + None])]) + assert index[6] == ('Е', + [('ёлка', [[], [('Sphinx', [('', '#index-5')])], None]), + ('Ель', [[], [('Sphinx', [('', '#index-4')])], None])]) + + +@pytest.mark.sphinx('dummy', freshenv=True) +def test_create_triple_index(app): + text = (".. index:: triple: foo; bar; baz\n" + ".. index:: triple: Python; Sphinx; reST\n") + restructuredtext.parse(app, text) + index = IndexEntries(app.env).create_index(app.builder) + assert len(index) == 5 + assert index[0] == ('B', [('bar', [[], [('baz, foo', [('', '#index-0')])], None]), + ('baz', [[], [('foo bar', [('', '#index-0')])], None])]) + assert index[1] == ('F', [('foo', [[], [('bar baz', [('', '#index-0')])], None])]) + assert index[2] == ('P', [('Python', [[], [('Sphinx reST', [('', '#index-1')])], None])]) + assert index[3] == ('R', [('reST', [[], [('Python Sphinx', [('', '#index-1')])], None])]) + assert index[4] == ('S', [('Sphinx', [[], [('reST, Python', [('', '#index-1')])], None])]) + + +@pytest.mark.sphinx('dummy', freshenv=True) +def test_create_see_index(app): + text = (".. index:: see: docutils; reStructuredText\n" + ".. index:: see: Python; interpreter\n" + ".. 
index:: see: Sphinx; documentation tool\n") + restructuredtext.parse(app, text) + index = IndexEntries(app.env).create_index(app.builder) + assert len(index) == 3 + assert index[0] == ('D', [('docutils', [[], [('see reStructuredText', [])], None])]) + assert index[1] == ('P', [('Python', [[], [('see interpreter', [])], None])]) + assert index[2] == ('S', [('Sphinx', [[], [('see documentation tool', [])], None])]) + + +@pytest.mark.sphinx('dummy', freshenv=True) +def test_create_seealso_index(app): + text = (".. index:: seealso: docutils; reStructuredText\n" + ".. index:: seealso: Python; interpreter\n" + ".. index:: seealso: Sphinx; documentation tool\n") + restructuredtext.parse(app, text) + index = IndexEntries(app.env).create_index(app.builder) + assert len(index) == 3 + assert index[0] == ('D', [('docutils', [[], [('see also reStructuredText', [])], None])]) + assert index[1] == ('P', [('Python', [[], [('see also interpreter', [])], None])]) + assert index[2] == ('S', [('Sphinx', [[], [('see also documentation tool', [])], None])]) + + +@pytest.mark.sphinx('dummy', freshenv=True) +def test_create_main_index(app): + text = (".. index:: !docutils\n" + ".. index:: docutils\n" + ".. index:: pip; install\n" + ".. index:: !pip; install\n") + restructuredtext.parse(app, text) + index = IndexEntries(app.env).create_index(app.builder) + assert len(index) == 2 + assert index[0] == ('D', [('docutils', [[('main', '#index-0'), + ('', '#index-1')], [], None])]) + assert index[1] == ('P', [('pip', [[], [('install', [('main', '#index-3'), + ('', '#index-2')])], None])]) + + +@pytest.mark.sphinx('dummy', freshenv=True) +def test_create_index_with_name(app): + text = (".. index:: single: docutils\n" + " :name: ref1\n" + ".. index:: single: Python\n" + " :name: ref2\n" + ".. index:: Sphinx\n") + restructuredtext.parse(app, text) + index = IndexEntries(app.env).create_index(app.builder) + + # check index is created correctly + assert len(index) == 3 + assert index[0] == ('D', [('docutils', [[('', '#ref1')], [], None])]) + assert index[1] == ('P', [('Python', [[('', '#ref2')], [], None])]) + assert index[2] == ('S', [('Sphinx', [[('', '#index-0')], [], None])]) + + # check the reference labels are created correctly + std = app.env.get_domain('std') + assert std.anonlabels['ref1'] == ('index', 'ref1') + assert std.anonlabels['ref2'] == ('index', 'ref2') + + +@pytest.mark.sphinx('dummy', freshenv=True) +def test_create_index_by_key(app): + # At present, only glossary directive is able to create index key + text = (".. 
glossary::\n" + "\n" + " docutils\n" + " Python\n" + " スフィンクス : ス\n") + restructuredtext.parse(app, text) + index = IndexEntries(app.env).create_index(app.builder) + assert len(index) == 3 + assert index[0] == ('D', [('docutils', [[('main', '#term-docutils')], [], None])]) + assert index[1] == ('P', [('Python', [[('main', '#term-Python')], [], None])]) + assert index[2] == ('ス', [('スフィンクス', [[('main', '#term-0')], [], 'ス'])]) diff --git a/tests/test_environment_record_dependencies.py b/tests/test_environment_record_dependencies.py new file mode 100644 index 0000000..0a17253 --- /dev/null +++ b/tests/test_environment_record_dependencies.py @@ -0,0 +1,10 @@ +"""Tests for ``record_dependencies``.""" + +import pytest + + +@pytest.mark.sphinx('html', testroot='environment-record-dependencies') +def test_record_dependencies_cleared(app): + app.builder.read() + assert app.env.dependencies['index'] == set() + assert app.env.dependencies['api'] == {'example_module.py'} diff --git a/tests/test_environment_toctree.py b/tests/test_environment_toctree.py new file mode 100644 index 0000000..5123715 --- /dev/null +++ b/tests/test_environment_toctree.py @@ -0,0 +1,396 @@ +"""Test the sphinx.environment.adapters.toctree.""" + +import pytest +from docutils import nodes +from docutils.nodes import bullet_list, list_item, literal, reference, title + +from sphinx import addnodes +from sphinx.addnodes import compact_paragraph, only +from sphinx.builders.html import StandaloneHTMLBuilder +from sphinx.environment.adapters.toctree import document_toc, global_toctree_for_doc +from sphinx.testing.util import assert_node + + +@pytest.mark.sphinx('xml', testroot='toctree') +@pytest.mark.test_params(shared_result='test_environment_toctree_basic') +def test_process_doc(app): + app.build() + # tocs + toctree = app.env.tocs['index'] + assert_node(toctree, + [bullet_list, ([list_item, (compact_paragraph, # [0][0] + [bullet_list, (addnodes.toctree, # [0][1][0] + only, # [0][1][1] + list_item)])], # [0][1][2] + [list_item, (compact_paragraph, # [1][0] + [bullet_list, (addnodes.toctree, # [1][1][0] + addnodes.toctree)])], # [1][1][1] + list_item)]) + + assert_node(toctree[0][0], + [compact_paragraph, reference, "Welcome to Sphinx Tests’s documentation!"]) + assert_node(toctree[0][0][0], reference, anchorname='') + assert_node(toctree[0][1][0], addnodes.toctree, + caption="Table of Contents", glob=False, hidden=False, + titlesonly=False, maxdepth=2, numbered=999, + entries=[(None, 'foo'), (None, 'bar'), (None, 'http://sphinx-doc.org/'), + (None, 'self')], + includefiles=['foo', 'bar']) + + # only branch + assert_node(toctree[0][1][1], addnodes.only, expr="html") + assert_node(toctree[0][1][1], + [only, list_item, ([compact_paragraph, reference, "Section for HTML"], + [bullet_list, addnodes.toctree])]) + assert_node(toctree[0][1][1][0][0][0], reference, anchorname='#section-for-html') + assert_node(toctree[0][1][1][0][1][0], addnodes.toctree, + caption=None, glob=False, hidden=False, entries=[(None, 'baz')], + includefiles=['baz'], titlesonly=False, maxdepth=-1, numbered=0) + assert_node(toctree[0][1][2], + ([compact_paragraph, reference, "subsection"], + [bullet_list, list_item, compact_paragraph, reference, "subsubsection"])) + + assert_node(toctree[1][0], + [compact_paragraph, reference, "Test for issue #1157"]) + assert_node(toctree[1][0][0], reference, anchorname='#test-for-issue-1157') + assert_node(toctree[1][1][0], addnodes.toctree, + caption=None, entries=[], glob=False, hidden=False, + titlesonly=False, maxdepth=-1, 
numbered=0) + assert_node(toctree[1][1][1], addnodes.toctree, + caption=None, glob=False, hidden=True, + titlesonly=False, maxdepth=-1, numbered=0, + entries=[('Latest reference', 'http://sphinx-doc.org/latest/'), + ('Python', 'http://python.org/')]) + + assert_node(toctree[2][0], + [compact_paragraph, reference, "Indices and tables"]) + + # other collections + assert app.env.toc_num_entries['index'] == 6 + assert app.env.toctree_includes['index'] == ['foo', 'bar', 'baz'] + assert app.env.files_to_rebuild['foo'] == {'index'} + assert app.env.files_to_rebuild['bar'] == {'index'} + assert app.env.files_to_rebuild['baz'] == {'index'} + assert app.env.glob_toctrees == set() + assert app.env.numbered_toctrees == {'index'} + + # qux has no section title + assert len(app.env.tocs['qux']) == 0 + assert_node(app.env.tocs['qux'], nodes.bullet_list) + assert app.env.toc_num_entries['qux'] == 0 + assert 'qux' not in app.env.toctree_includes + + +@pytest.mark.sphinx('dummy', testroot='toctree-glob') +def test_glob(app): + includefiles = ['foo', 'bar/index', 'bar/bar_1', 'bar/bar_2', + 'bar/bar_3', 'baz', 'qux/index'] + + app.build() + + # tocs + toctree = app.env.tocs['index'] + assert_node(toctree, + [bullet_list, list_item, (compact_paragraph, # [0][0] + [bullet_list, (list_item, # [0][1][0] + list_item)])]) # [0][1][1] + + assert_node(toctree[0][0], + [compact_paragraph, reference, "test-toctree-glob"]) + assert_node(toctree[0][1][0], + [list_item, ([compact_paragraph, reference, "normal order"], + [bullet_list, addnodes.toctree])]) # [0][1][0][1][0] + assert_node(toctree[0][1][0][1][0], addnodes.toctree, caption=None, + glob=True, hidden=False, titlesonly=False, + maxdepth=-1, numbered=0, includefiles=includefiles, + entries=[(None, 'foo'), (None, 'bar/index'), (None, 'bar/bar_1'), + (None, 'bar/bar_2'), (None, 'bar/bar_3'), (None, 'baz'), + (None, 'qux/index'), + ('hyperref', 'https://sphinx-doc.org/?q=sphinx')]) + assert_node(toctree[0][1][1], + [list_item, ([compact_paragraph, reference, "reversed order"], + [bullet_list, addnodes.toctree])]) # [0][1][1][1][0] + assert_node(toctree[0][1][1][1][0], addnodes.toctree, caption=None, + glob=True, hidden=False, titlesonly=False, + maxdepth=-1, numbered=0, includefiles=list(reversed(includefiles)), + entries=[(None, 'qux/index'), (None, 'baz'), (None, 'bar/bar_3'), + (None, 'bar/bar_2'), (None, 'bar/bar_1'), (None, 'bar/index'), + (None, 'foo')]) + + # other collections + assert app.env.toc_num_entries['index'] == 3 + assert app.env.toctree_includes['index'] == includefiles + list(reversed(includefiles)) + for file in includefiles: + assert 'index' in app.env.files_to_rebuild[file] + assert 'index' in app.env.glob_toctrees + assert app.env.numbered_toctrees == set() + + +@pytest.mark.sphinx('dummy', testroot='toctree-domain-objects') +def test_domain_objects(app): + app.build() + + assert app.env.toc_num_entries['index'] == 0 + assert app.env.toc_num_entries['domains'] == 9 + assert app.env.toctree_includes['index'] == ['domains'] + assert 'index' in app.env.files_to_rebuild['domains'] + assert app.env.glob_toctrees == set() + assert app.env.numbered_toctrees == {'index'} + + # tocs + toctree = app.env.tocs['domains'] + assert_node(toctree, + [bullet_list, list_item, (compact_paragraph, # [0][0] + [bullet_list, (list_item, # [0][1][0] + [list_item, # [0][1][1] + (compact_paragraph, # [0][1][1][0] + [bullet_list, (list_item, # [0][1][1][1][0] + list_item, + list_item, + list_item)])], # [0][1][1][1][3] + list_item, + list_item)])]) # [0][1][1] + + 
assert_node(toctree[0][0], + [compact_paragraph, reference, "test-domain-objects"]) + + assert_node(toctree[0][1][0], + [list_item, ([compact_paragraph, reference, literal, "world()"])]) + + assert_node(toctree[0][1][1][1][3], + [list_item, ([compact_paragraph, reference, literal, "HelloWorldPrinter.print()"])]) + + +@pytest.mark.sphinx('xml', testroot='toctree') +@pytest.mark.test_params(shared_result='test_environment_toctree_basic') +def test_document_toc(app): + app.build() + toctree = document_toc(app.env, 'index', app.builder.tags) + + assert_node(toctree, + [bullet_list, ([list_item, (compact_paragraph, # [0][0] + [bullet_list, (addnodes.toctree, # [0][1][0] + list_item)])], # [0][1][1] + [list_item, (compact_paragraph, # [1][0] + [bullet_list, (addnodes.toctree, + addnodes.toctree)])], + [list_item, compact_paragraph])]) # [2][0] + assert_node(toctree[0][0], + [compact_paragraph, reference, "Welcome to Sphinx Tests’s documentation!"]) + assert_node(toctree[0][1][1], + ([compact_paragraph, reference, "subsection"], + [bullet_list, list_item, compact_paragraph, reference, "subsubsection"])) + assert_node(toctree[1][0], + [compact_paragraph, reference, "Test for issue #1157"]) + assert_node(toctree[2][0], + [compact_paragraph, reference, "Indices and tables"]) + + +@pytest.mark.sphinx('xml', testroot='toctree') +@pytest.mark.test_params(shared_result='test_environment_toctree_basic') +def test_document_toc_only(app): + app.build() + builder = StandaloneHTMLBuilder(app, app.env) + toctree = document_toc(app.env, 'index', builder.tags) + + assert_node(toctree, + [bullet_list, ([list_item, (compact_paragraph, # [0][0] + [bullet_list, (addnodes.toctree, # [0][1][0] + list_item, # [0][1][1] + list_item)])], # [0][1][2] + [list_item, (compact_paragraph, # [1][0] + [bullet_list, (addnodes.toctree, + addnodes.toctree)])], + [list_item, compact_paragraph])]) # [2][0] + assert_node(toctree[0][0], + [compact_paragraph, reference, "Welcome to Sphinx Tests’s documentation!"]) + assert_node(toctree[0][1][1], + ([compact_paragraph, reference, "Section for HTML"], + [bullet_list, addnodes.toctree])) + assert_node(toctree[0][1][2], + ([compact_paragraph, reference, "subsection"], + [bullet_list, list_item, compact_paragraph, reference, "subsubsection"])) + assert_node(toctree[1][0], + [compact_paragraph, reference, "Test for issue #1157"]) + assert_node(toctree[2][0], + [compact_paragraph, reference, "Indices and tables"]) + + +@pytest.mark.sphinx('xml', testroot='toctree') +@pytest.mark.test_params(shared_result='test_environment_toctree_basic') +def test_document_toc_tocdepth(app): + app.build() + toctree = document_toc(app.env, 'tocdepth', app.builder.tags) + + assert_node(toctree, + [bullet_list, list_item, (compact_paragraph, # [0][0] + bullet_list)]) # [0][1] + assert_node(toctree[0][0], + [compact_paragraph, reference, "level 1"]) + assert_node(toctree[0][1], + [bullet_list, list_item, compact_paragraph, reference, "level 2"]) + + +@pytest.mark.sphinx('xml', testroot='toctree') +@pytest.mark.test_params(shared_result='test_environment_toctree_basic') +def test_global_toctree_for_doc(app): + app.build() + toctree = global_toctree_for_doc(app.env, 'index', app.builder, collapse=False) + assert_node(toctree, + [compact_paragraph, ([title, "Table of Contents"], + bullet_list, + bullet_list, + bullet_list)]) + + assert_node(toctree[1], + ([list_item, ([compact_paragraph, reference, "foo"], + bullet_list)], + [list_item, compact_paragraph, reference, "bar"], + [list_item, compact_paragraph, 
reference, "http://sphinx-doc.org/"], + [list_item, compact_paragraph, reference, + "Welcome to Sphinx Tests’s documentation!"])) + assert_node(toctree[1][0][1], + ([list_item, compact_paragraph, reference, "quux"], + [list_item, compact_paragraph, reference, "foo.1"], + [list_item, compact_paragraph, reference, "foo.2"])) + + assert_node(toctree[1][0][0][0], reference, refuri="foo", secnumber=[1]) + assert_node(toctree[1][0][1][0][0][0], reference, refuri="quux", secnumber=[1, 1]) + assert_node(toctree[1][0][1][1][0][0], reference, refuri="foo#foo-1", secnumber=[1, 2]) + assert_node(toctree[1][0][1][2][0][0], reference, refuri="foo#foo-2", secnumber=[1, 3]) + assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=[2]) + assert_node(toctree[1][2][0][0], reference, refuri="http://sphinx-doc.org/") + assert_node(toctree[1][3][0][0], reference, refuri="") + + assert_node(toctree[2], + [bullet_list, list_item, compact_paragraph, reference, "baz"]) + assert_node(toctree[3], + ([list_item, compact_paragraph, reference, "Latest reference"], + [list_item, compact_paragraph, reference, "Python"])) + assert_node(toctree[3][0][0][0], reference, refuri="http://sphinx-doc.org/latest/") + assert_node(toctree[3][1][0][0], reference, refuri="http://python.org/") + + +@pytest.mark.sphinx('xml', testroot='toctree') +@pytest.mark.test_params(shared_result='test_environment_toctree_basic') +def test_global_toctree_for_doc_collapse(app): + app.build() + toctree = global_toctree_for_doc(app.env, 'index', app.builder, collapse=True) + assert_node(toctree, + [compact_paragraph, ([title, "Table of Contents"], + bullet_list, + bullet_list, + bullet_list)]) + + assert_node(toctree[1], + ([list_item, compact_paragraph, reference, "foo"], + [list_item, compact_paragraph, reference, "bar"], + [list_item, compact_paragraph, reference, "http://sphinx-doc.org/"], + [list_item, compact_paragraph, reference, + "Welcome to Sphinx Tests’s documentation!"])) + assert_node(toctree[1][0][0][0], reference, refuri="foo", secnumber=[1]) + assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=[2]) + assert_node(toctree[1][2][0][0], reference, refuri="http://sphinx-doc.org/") + assert_node(toctree[1][3][0][0], reference, refuri="") + + assert_node(toctree[2], + [bullet_list, list_item, compact_paragraph, reference, "baz"]) + assert_node(toctree[3], + ([list_item, compact_paragraph, reference, "Latest reference"], + [list_item, compact_paragraph, reference, "Python"])) + assert_node(toctree[3][0][0][0], reference, refuri="http://sphinx-doc.org/latest/") + assert_node(toctree[3][1][0][0], reference, refuri="http://python.org/") + + +@pytest.mark.sphinx('xml', testroot='toctree') +@pytest.mark.test_params(shared_result='test_environment_toctree_basic') +def test_global_toctree_for_doc_maxdepth(app): + app.build() + toctree = global_toctree_for_doc(app.env, 'index', app.builder, + collapse=False, maxdepth=3) + assert_node(toctree, + [compact_paragraph, ([title, "Table of Contents"], + bullet_list, + bullet_list, + bullet_list)]) + + assert_node(toctree[1], + ([list_item, ([compact_paragraph, reference, "foo"], + bullet_list)], + [list_item, compact_paragraph, reference, "bar"], + [list_item, compact_paragraph, reference, "http://sphinx-doc.org/"], + [list_item, compact_paragraph, reference, + "Welcome to Sphinx Tests’s documentation!"])) + assert_node(toctree[1][0][1], + ([list_item, compact_paragraph, reference, "quux"], + [list_item, ([compact_paragraph, reference, "foo.1"], + bullet_list)], + [list_item, 
compact_paragraph, reference, "foo.2"])) + assert_node(toctree[1][0][1][1][1], + [bullet_list, list_item, compact_paragraph, reference, "foo.1-1"]) + + assert_node(toctree[1][0][0][0], reference, refuri="foo", secnumber=[1]) + assert_node(toctree[1][0][1][0][0][0], reference, refuri="quux", secnumber=[1, 1]) + assert_node(toctree[1][0][1][1][0][0], reference, refuri="foo#foo-1", secnumber=[1, 2]) + assert_node(toctree[1][0][1][1][1][0][0][0], + reference, refuri="foo#foo-1-1", secnumber=[1, 2, 1]) + assert_node(toctree[1][0][1][2][0][0], reference, refuri="foo#foo-2", secnumber=[1, 3]) + assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=[2]) + assert_node(toctree[1][2][0][0], reference, refuri="http://sphinx-doc.org/") + assert_node(toctree[1][3][0][0], reference, refuri="") + + assert_node(toctree[2], + [bullet_list, list_item, compact_paragraph, reference, "baz"]) + assert_node(toctree[3], + ([list_item, compact_paragraph, reference, "Latest reference"], + [list_item, compact_paragraph, reference, "Python"])) + assert_node(toctree[3][0][0][0], reference, refuri="http://sphinx-doc.org/latest/") + assert_node(toctree[3][1][0][0], reference, refuri="http://python.org/") + + +@pytest.mark.sphinx('xml', testroot='toctree') +@pytest.mark.test_params(shared_result='test_environment_toctree_basic') +def test_global_toctree_for_doc_includehidden(app): + app.build() + toctree = global_toctree_for_doc(app.env, 'index', app.builder, + collapse=False, includehidden=False) + assert_node(toctree, + [compact_paragraph, ([title, "Table of Contents"], + bullet_list, + bullet_list)]) + + assert_node(toctree[1], + ([list_item, ([compact_paragraph, reference, "foo"], + bullet_list)], + [list_item, compact_paragraph, reference, "bar"], + [list_item, compact_paragraph, reference, "http://sphinx-doc.org/"], + [list_item, compact_paragraph, reference, + "Welcome to Sphinx Tests’s documentation!"])) + assert_node(toctree[1][0][1], + ([list_item, compact_paragraph, reference, "quux"], + [list_item, compact_paragraph, reference, "foo.1"], + [list_item, compact_paragraph, reference, "foo.2"])) + + assert_node(toctree[1][0][0][0], reference, refuri="foo", secnumber=[1]) + assert_node(toctree[1][0][1][0][0][0], reference, refuri="quux", secnumber=[1, 1]) + assert_node(toctree[1][0][1][1][0][0], reference, refuri="foo#foo-1", secnumber=[1, 2]) + assert_node(toctree[1][0][1][2][0][0], reference, refuri="foo#foo-2", secnumber=[1, 3]) + assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=[2]) + assert_node(toctree[1][2][0][0], reference, refuri="http://sphinx-doc.org/") + + assert_node(toctree[2], + [bullet_list, list_item, compact_paragraph, reference, "baz"]) + + +@pytest.mark.sphinx('xml', testroot='toctree-index') +def test_toctree_index(app): + app.build() + toctree = app.env.tocs['index'] + assert_node(toctree, + [bullet_list, ([list_item, (compact_paragraph, # [0][0] + [bullet_list, (addnodes.toctree, # [0][1][0] + addnodes.toctree)])])]) # [0][1][1] + assert_node(toctree[0][1][1], addnodes.toctree, + caption="Indices", glob=False, hidden=False, + titlesonly=False, maxdepth=-1, numbered=0, + entries=[(None, 'genindex'), (None, 'modindex'), (None, 'search')]) diff --git a/tests/test_errors.py b/tests/test_errors.py new file mode 100644 index 0000000..6378760 --- /dev/null +++ b/tests/test_errors.py @@ -0,0 +1,11 @@ +from sphinx.errors import ExtensionError + + +def test_extension_error_repr(): + exc = ExtensionError("foo") + assert repr(exc) == "ExtensionError('foo')" + + +def 
test_extension_error_with_orig_exc_repr(): + exc = ExtensionError("foo", Exception("bar")) + assert repr(exc) == "ExtensionError('foo', Exception('bar'))" diff --git a/tests/test_events.py b/tests/test_events.py new file mode 100644 index 0000000..d850a91 --- /dev/null +++ b/tests/test_events.py @@ -0,0 +1,56 @@ +"""Test the EventManager class.""" + +import pytest + +from sphinx.errors import ExtensionError +from sphinx.events import EventManager + + +def test_event_priority(): + result = [] + events = EventManager(object()) # pass an dummy object as an app + events.connect('builder-inited', lambda app: result.append(1), priority = 500) + events.connect('builder-inited', lambda app: result.append(2), priority = 500) + events.connect('builder-inited', lambda app: result.append(3), priority = 200) # earlier + events.connect('builder-inited', lambda app: result.append(4), priority = 700) # later + events.connect('builder-inited', lambda app: result.append(5), priority = 500) + + events.emit('builder-inited') + assert result == [3, 1, 2, 5, 4] + + +class FakeApp: + def __init__(self, pdb: bool = False): + self.pdb = pdb + + +def test_event_allowed_exceptions(): + def raise_error(app): + raise RuntimeError + + events = EventManager(FakeApp()) # pass an dummy object as an app + events.connect('builder-inited', raise_error, priority=500) + + # all errors are converted to ExtensionError + with pytest.raises(ExtensionError): + events.emit('builder-inited') + + # Allow RuntimeError (pass-through) + with pytest.raises(RuntimeError): + events.emit('builder-inited', allowed_exceptions=(RuntimeError,)) + + +def test_event_pdb(): + def raise_error(app): + raise RuntimeError + + events = EventManager(FakeApp(pdb=True)) # pass an dummy object as an app + events.connect('builder-inited', raise_error, priority=500) + + # errors aren't converted + with pytest.raises(RuntimeError): + events.emit('builder-inited') + + # Allow RuntimeError (pass-through) + with pytest.raises(RuntimeError): + events.emit('builder-inited', allowed_exceptions=(RuntimeError,)) diff --git a/tests/test_ext_apidoc.py b/tests/test_ext_apidoc.py new file mode 100644 index 0000000..1e089a3 --- /dev/null +++ b/tests/test_ext_apidoc.py @@ -0,0 +1,665 @@ +"""Test the sphinx.apidoc module.""" + +import os.path +from collections import namedtuple + +import pytest + +import sphinx.ext.apidoc +from sphinx.ext.apidoc import main as apidoc_main + + +@pytest.fixture() +def apidoc(rootdir, tmp_path, apidoc_params): + _, kwargs = apidoc_params + coderoot = rootdir / kwargs.get('coderoot', 'test-root') + outdir = tmp_path / 'out' + excludes = [str(coderoot / e) for e in kwargs.get('excludes', [])] + args = ['-o', str(outdir), '-F', str(coderoot)] + excludes + kwargs.get('options', []) + apidoc_main(args) + return namedtuple('apidoc', 'coderoot,outdir')(coderoot, outdir) + + +@pytest.fixture() +def apidoc_params(request): + pargs = {} + kwargs = {} + + for info in reversed(list(request.node.iter_markers("apidoc"))): + for i, a in enumerate(info.args): + pargs[i] = a + kwargs.update(info.kwargs) + + args = [pargs[i] for i in sorted(pargs.keys())] + return args, kwargs + + +@pytest.mark.apidoc(coderoot='test-root') +def test_simple(make_app, apidoc): + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + assert (outdir / 'index.rst').is_file() + + app = make_app('text', srcdir=outdir) + app.build() + print(app._status.getvalue()) + print(app._warning.getvalue()) + + +@pytest.mark.apidoc( + coderoot='test-apidoc-pep420/a', + 
options=["--implicit-namespaces"], +) +def test_pep_0420_enabled(make_app, apidoc): + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + assert (outdir / 'a.b.c.rst').is_file() + assert (outdir / 'a.b.e.rst').is_file() + assert (outdir / 'a.b.x.rst').is_file() + + with open(outdir / 'a.b.c.rst', encoding='utf-8') as f: + rst = f.read() + assert "automodule:: a.b.c.d\n" in rst + assert "automodule:: a.b.c\n" in rst + + with open(outdir / 'a.b.e.rst', encoding='utf-8') as f: + rst = f.read() + assert "automodule:: a.b.e.f\n" in rst + + with open(outdir / 'a.b.x.rst', encoding='utf-8') as f: + rst = f.read() + assert "automodule:: a.b.x.y\n" in rst + assert "automodule:: a.b.x\n" not in rst + + app = make_app('text', srcdir=outdir) + app.build() + print(app._status.getvalue()) + print(app._warning.getvalue()) + + builddir = outdir / '_build' / 'text' + assert (builddir / 'a.b.c.txt').is_file() + assert (builddir / 'a.b.e.txt').is_file() + assert (builddir / 'a.b.x.txt').is_file() + + with open(builddir / 'a.b.c.txt', encoding='utf-8') as f: + txt = f.read() + assert "a.b.c package\n" in txt + + with open(builddir / 'a.b.e.txt', encoding='utf-8') as f: + txt = f.read() + assert "a.b.e.f module\n" in txt + + with open(builddir / 'a.b.x.txt', encoding='utf-8') as f: + txt = f.read() + assert "a.b.x namespace\n" in txt + + +@pytest.mark.apidoc( + coderoot='test-apidoc-pep420/a', + options=["--implicit-namespaces", "--separate"], +) +def test_pep_0420_enabled_separate(make_app, apidoc): + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + assert (outdir / 'a.b.c.rst').is_file() + assert (outdir / 'a.b.e.rst').is_file() + assert (outdir / 'a.b.e.f.rst').is_file() + assert (outdir / 'a.b.x.rst').is_file() + assert (outdir / 'a.b.x.y.rst').is_file() + + with open(outdir / 'a.b.c.rst', encoding='utf-8') as f: + rst = f.read() + assert ".. toctree::\n :maxdepth: 4\n\n a.b.c.d\n" in rst + + with open(outdir / 'a.b.e.rst', encoding='utf-8') as f: + rst = f.read() + assert ".. toctree::\n :maxdepth: 4\n\n a.b.e.f\n" in rst + + with open(outdir / 'a.b.x.rst', encoding='utf-8') as f: + rst = f.read() + assert ".. 
toctree::\n :maxdepth: 4\n\n a.b.x.y\n" in rst + + app = make_app('text', srcdir=outdir) + app.build() + print(app._status.getvalue()) + print(app._warning.getvalue()) + + builddir = outdir / '_build' / 'text' + assert (builddir / 'a.b.c.txt').is_file() + assert (builddir / 'a.b.e.txt').is_file() + assert (builddir / 'a.b.e.f.txt').is_file() + assert (builddir / 'a.b.x.txt').is_file() + assert (builddir / 'a.b.x.y.txt').is_file() + + with open(builddir / 'a.b.c.txt', encoding='utf-8') as f: + txt = f.read() + assert "a.b.c package\n" in txt + + with open(builddir / 'a.b.e.f.txt', encoding='utf-8') as f: + txt = f.read() + assert "a.b.e.f module\n" in txt + + with open(builddir / 'a.b.x.txt', encoding='utf-8') as f: + txt = f.read() + assert "a.b.x namespace\n" in txt + + +@pytest.mark.apidoc(coderoot='test-apidoc-pep420/a') +def test_pep_0420_disabled(make_app, apidoc): + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + assert not (outdir / 'a.b.c.rst').exists() + assert not (outdir / 'a.b.x.rst').exists() + + app = make_app('text', srcdir=outdir) + app.build() + print(app._status.getvalue()) + print(app._warning.getvalue()) + + +@pytest.mark.apidoc( + coderoot='test-apidoc-pep420/a/b') +def test_pep_0420_disabled_top_level_verify(make_app, apidoc): + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + assert (outdir / 'c.rst').is_file() + assert not (outdir / 'x.rst').exists() + + with open(outdir / 'c.rst', encoding='utf-8') as f: + rst = f.read() + assert "c package\n" in rst + assert "automodule:: c.d\n" in rst + assert "automodule:: c\n" in rst + + app = make_app('text', srcdir=outdir) + app.build() + print(app._status.getvalue()) + print(app._warning.getvalue()) + + +@pytest.mark.apidoc( + coderoot='test-apidoc-trailing-underscore') +def test_trailing_underscore(make_app, apidoc): + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + assert (outdir / 'package_.rst').is_file() + + app = make_app('text', srcdir=outdir) + app.build() + print(app._status.getvalue()) + print(app._warning.getvalue()) + + builddir = outdir / '_build' / 'text' + with open(builddir / 'package_.txt', encoding='utf-8') as f: + rst = f.read() + assert "package_ package\n" in rst + assert "package_.module_ module\n" in rst + + +@pytest.mark.apidoc( + coderoot='test-apidoc-pep420/a', + excludes=["b/c/d.py", "b/e/f.py", "b/e/__init__.py"], + options=["--implicit-namespaces", "--separate"], +) +def test_excludes(apidoc): + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + assert (outdir / 'a.rst').is_file() + assert (outdir / 'a.b.rst').is_file() + assert (outdir / 'a.b.c.rst').is_file() # generated because not empty + assert not (outdir / 'a.b.e.rst').is_file() # skipped because of empty after excludes + assert (outdir / 'a.b.x.rst').is_file() + assert (outdir / 'a.b.x.y.rst').is_file() + + +@pytest.mark.apidoc( + coderoot='test-apidoc-pep420/a', + excludes=["b/e"], + options=["--implicit-namespaces", "--separate"], +) +def test_excludes_subpackage_should_be_skipped(apidoc): + """Subpackage exclusion should work.""" + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + assert (outdir / 'a.rst').is_file() + assert (outdir / 'a.b.rst').is_file() + assert (outdir / 'a.b.c.rst').is_file() # generated because not empty + assert not (outdir / 'a.b.e.f.rst').is_file() # skipped because 'b/e' subpackage is skipped + + +@pytest.mark.apidoc( + coderoot='test-apidoc-pep420/a', + excludes=["b/e/f.py"], + options=["--implicit-namespaces", "--separate"], +) 
+def test_excludes_module_should_be_skipped(apidoc): + """Module exclusion should work.""" + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + assert (outdir / 'a.rst').is_file() + assert (outdir / 'a.b.rst').is_file() + assert (outdir / 'a.b.c.rst').is_file() # generated because not empty + assert not (outdir / 'a.b.e.f.rst').is_file() # skipped because of empty after excludes + + +@pytest.mark.apidoc( + coderoot='test-apidoc-pep420/a', + excludes=[], + options=["--implicit-namespaces", "--separate"], +) +def test_excludes_module_should_not_be_skipped(apidoc): + """Module should be included if no excludes are used.""" + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + assert (outdir / 'a.rst').is_file() + assert (outdir / 'a.b.rst').is_file() + assert (outdir / 'a.b.c.rst').is_file() # generated because not empty + assert (outdir / 'a.b.e.f.rst').is_file() # skipped because of empty after excludes + + +@pytest.mark.apidoc( + coderoot='test-root', + options=[ + '--doc-project', 'プロジェクト名', + '--doc-author', '著者名', + '--doc-version', 'バージョン', + '--doc-release', 'リリース', + ], +) +def test_multibyte_parameters(make_app, apidoc): + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + assert (outdir / 'index.rst').is_file() + + conf_py = (outdir / 'conf.py').read_text(encoding='utf8') + assert "project = 'プロジェクト名'" in conf_py + assert "author = '著者名'" in conf_py + assert "version = 'バージョン'" in conf_py + assert "release = 'リリース'" in conf_py + + app = make_app('text', srcdir=outdir) + app.build() + print(app._status.getvalue()) + print(app._warning.getvalue()) + + +@pytest.mark.apidoc( + coderoot='test-root', + options=['--ext-mathjax'], +) +def test_extension_parsed(make_app, apidoc): + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + + with open(outdir / 'conf.py', encoding='utf-8') as f: + rst = f.read() + assert "sphinx.ext.mathjax" in rst + + +@pytest.mark.apidoc( + coderoot='test-apidoc-toc/mypackage', + options=["--implicit-namespaces"], +) +def test_toc_all_references_should_exist_pep420_enabled(make_app, apidoc): + """All references in toc should exist. This test doesn't say if + directories with empty __init__.py and and nothing else should be + skipped, just ensures consistency between what's referenced in the toc + and what is created. This is the variant with pep420 enabled. + """ + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + + toc = extract_toc(outdir / 'mypackage.rst') + + refs = [l.strip() for l in toc.splitlines() if l.strip()] + found_refs = [] + missing_files = [] + for ref in refs: + if ref and ref[0] in (':', '#'): + continue + found_refs.append(ref) + filename = f"{ref}.rst" + if not (outdir / filename).is_file(): + missing_files.append(filename) + + assert len(missing_files) == 0, \ + 'File(s) referenced in TOC not found: {}\n' \ + 'TOC:\n{}'.format(", ".join(missing_files), toc) + + +@pytest.mark.apidoc( + coderoot='test-apidoc-toc/mypackage', +) +def test_toc_all_references_should_exist_pep420_disabled(make_app, apidoc): + """All references in toc should exist. This test doesn't say if + directories with empty __init__.py and and nothing else should be + skipped, just ensures consistency between what's referenced in the toc + and what is created. This is the variant with pep420 disabled. 
+ """ + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + + toc = extract_toc(outdir / 'mypackage.rst') + + refs = [l.strip() for l in toc.splitlines() if l.strip()] + found_refs = [] + missing_files = [] + for ref in refs: + if ref and ref[0] in (':', '#'): + continue + filename = f"{ref}.rst" + found_refs.append(ref) + if not (outdir / filename).is_file(): + missing_files.append(filename) + + assert len(missing_files) == 0, \ + 'File(s) referenced in TOC not found: {}\n' \ + 'TOC:\n{}'.format(", ".join(missing_files), toc) + + +def extract_toc(path): + """Helper: Extract toc section from package rst file""" + with open(path, encoding='utf-8') as f: + rst = f.read() + + # Read out the part containing the toctree + toctree_start = "\n.. toctree::\n" + toctree_end = "\nSubmodules" + + start_idx = rst.index(toctree_start) + end_idx = rst.index(toctree_end, start_idx) + toctree = rst[start_idx + len(toctree_start):end_idx] + + return toctree + + +@pytest.mark.apidoc( + coderoot='test-apidoc-subpackage-in-toc', + options=['--separate'], +) +def test_subpackage_in_toc(make_app, apidoc): + """Make sure that empty subpackages with non-empty subpackages in them + are not skipped (issue #4520) + """ + outdir = apidoc.outdir + assert (outdir / 'conf.py').is_file() + + assert (outdir / 'parent.rst').is_file() + with open(outdir / 'parent.rst', encoding='utf-8') as f: + parent = f.read() + assert 'parent.child' in parent + + assert (outdir / 'parent.child.rst').is_file() + with open(outdir / 'parent.child.rst', encoding='utf-8') as f: + parent_child = f.read() + assert 'parent.child.foo' in parent_child + + assert (outdir / 'parent.child.foo.rst').is_file() + + +def test_private(tmp_path): + (tmp_path / 'hello.py').write_text('', encoding='utf8') + (tmp_path / '_world.py').write_text('', encoding='utf8') + + # without --private option + apidoc_main(['-o', str(tmp_path), str(tmp_path)]) + assert (tmp_path / 'hello.rst').exists() + assert ':private-members:' not in (tmp_path / 'hello.rst').read_text(encoding='utf8') + assert not (tmp_path / '_world.rst').exists() + + # with --private option + apidoc_main(['--private', '-f', '-o', str(tmp_path), str(tmp_path)]) + assert (tmp_path / 'hello.rst').exists() + assert ':private-members:' in (tmp_path / 'hello.rst').read_text(encoding='utf8') + assert (tmp_path / '_world.rst').exists() + + +def test_toc_file(tmp_path): + outdir = tmp_path + (outdir / 'module').mkdir(parents=True, exist_ok=True) + (outdir / 'example.py').write_text('', encoding='utf8') + (outdir / 'module' / 'example.py').write_text('', encoding='utf8') + apidoc_main(['-o', str(tmp_path), str(tmp_path)]) + assert (outdir / 'modules.rst').exists() + + content = (outdir / 'modules.rst').read_text(encoding='utf8') + assert content == ("test_toc_file0\n" + "==============\n" + "\n" + ".. toctree::\n" + " :maxdepth: 4\n" + "\n" + " example\n") + + +def test_module_file(tmp_path): + outdir = tmp_path + (outdir / 'example.py').write_text('', encoding='utf8') + apidoc_main(['-o', str(tmp_path), str(tmp_path)]) + assert (outdir / 'example.rst').exists() + + content = (outdir / 'example.rst').read_text(encoding='utf8') + assert content == ("example module\n" + "==============\n" + "\n" + ".. 
automodule:: example\n" + " :members:\n" + " :undoc-members:\n" + " :show-inheritance:\n") + + +def test_module_file_noheadings(tmp_path): + outdir = tmp_path + (outdir / 'example.py').write_text('', encoding='utf8') + apidoc_main(['--no-headings', '-o', str(tmp_path), str(tmp_path)]) + assert (outdir / 'example.rst').exists() + + content = (outdir / 'example.rst').read_text(encoding='utf8') + assert content == (".. automodule:: example\n" + " :members:\n" + " :undoc-members:\n" + " :show-inheritance:\n") + + +def test_package_file(tmp_path): + outdir = tmp_path + (outdir / 'testpkg').mkdir(parents=True, exist_ok=True) + (outdir / 'testpkg' / '__init__.py').write_text('', encoding='utf8') + (outdir / 'testpkg' / 'hello.py').write_text('', encoding='utf8') + (outdir / 'testpkg' / 'world.py').write_text('', encoding='utf8') + (outdir / 'testpkg' / 'subpkg').mkdir(parents=True, exist_ok=True) + (outdir / 'testpkg' / 'subpkg' / '__init__.py').write_text('', encoding='utf8') + apidoc_main(['-o', str(outdir), str(outdir / 'testpkg')]) + assert (outdir / 'testpkg.rst').exists() + assert (outdir / 'testpkg.subpkg.rst').exists() + + content = (outdir / 'testpkg.rst').read_text(encoding='utf8') + assert content == ("testpkg package\n" + "===============\n" + "\n" + "Subpackages\n" + "-----------\n" + "\n" + ".. toctree::\n" + " :maxdepth: 4\n" + "\n" + " testpkg.subpkg\n" + "\n" + "Submodules\n" + "----------\n" + "\n" + "testpkg.hello module\n" + "--------------------\n" + "\n" + ".. automodule:: testpkg.hello\n" + " :members:\n" + " :undoc-members:\n" + " :show-inheritance:\n" + "\n" + "testpkg.world module\n" + "--------------------\n" + "\n" + ".. automodule:: testpkg.world\n" + " :members:\n" + " :undoc-members:\n" + " :show-inheritance:\n" + "\n" + "Module contents\n" + "---------------\n" + "\n" + ".. automodule:: testpkg\n" + " :members:\n" + " :undoc-members:\n" + " :show-inheritance:\n") + + content = (outdir / 'testpkg.subpkg.rst').read_text(encoding='utf8') + assert content == ("testpkg.subpkg package\n" + "======================\n" + "\n" + "Module contents\n" + "---------------\n" + "\n" + ".. automodule:: testpkg.subpkg\n" + " :members:\n" + " :undoc-members:\n" + " :show-inheritance:\n") + + +def test_package_file_separate(tmp_path): + outdir = tmp_path + (outdir / 'testpkg').mkdir(parents=True, exist_ok=True) + (outdir / 'testpkg' / '__init__.py').write_text('', encoding='utf8') + (outdir / 'testpkg' / 'example.py').write_text('', encoding='utf8') + apidoc_main(['--separate', '-o', str(tmp_path), str(tmp_path / 'testpkg')]) + assert (outdir / 'testpkg.rst').exists() + assert (outdir / 'testpkg.example.rst').exists() + + content = (outdir / 'testpkg.rst').read_text(encoding='utf8') + assert content == ("testpkg package\n" + "===============\n" + "\n" + "Submodules\n" + "----------\n" + "\n" + ".. toctree::\n" + " :maxdepth: 4\n" + "\n" + " testpkg.example\n" + "\n" + "Module contents\n" + "---------------\n" + "\n" + ".. automodule:: testpkg\n" + " :members:\n" + " :undoc-members:\n" + " :show-inheritance:\n") + + content = (outdir / 'testpkg.example.rst').read_text(encoding='utf8') + assert content == ("testpkg.example module\n" + "======================\n" + "\n" + ".. 
automodule:: testpkg.example\n" + " :members:\n" + " :undoc-members:\n" + " :show-inheritance:\n") + + +def test_package_file_module_first(tmp_path): + outdir = tmp_path + (outdir / 'testpkg').mkdir(parents=True, exist_ok=True) + (outdir / 'testpkg' / '__init__.py').write_text('', encoding='utf8') + (outdir / 'testpkg' / 'example.py').write_text('', encoding='utf8') + apidoc_main(['--module-first', '-o', str(tmp_path), str(tmp_path)]) + + content = (outdir / 'testpkg.rst').read_text(encoding='utf8') + assert content == ("testpkg package\n" + "===============\n" + "\n" + ".. automodule:: testpkg\n" + " :members:\n" + " :undoc-members:\n" + " :show-inheritance:\n" + "\n" + "Submodules\n" + "----------\n" + "\n" + "testpkg.example module\n" + "----------------------\n" + "\n" + ".. automodule:: testpkg.example\n" + " :members:\n" + " :undoc-members:\n" + " :show-inheritance:\n") + + +def test_package_file_without_submodules(tmp_path): + outdir = tmp_path + (outdir / 'testpkg').mkdir(parents=True, exist_ok=True) + (outdir / 'testpkg' / '__init__.py').write_text('', encoding='utf8') + apidoc_main(['-o', str(tmp_path), str(tmp_path / 'testpkg')]) + assert (outdir / 'testpkg.rst').exists() + + content = (outdir / 'testpkg.rst').read_text(encoding='utf8') + assert content == ("testpkg package\n" + "===============\n" + "\n" + "Module contents\n" + "---------------\n" + "\n" + ".. automodule:: testpkg\n" + " :members:\n" + " :undoc-members:\n" + " :show-inheritance:\n") + + +def test_namespace_package_file(tmp_path): + outdir = tmp_path + (outdir / 'testpkg').mkdir(parents=True, exist_ok=True) + (outdir / 'testpkg' / 'example.py').write_text('', encoding='utf8') + apidoc_main(['--implicit-namespace', '-o', str(tmp_path), str(tmp_path / 'testpkg')]) + assert (outdir / 'testpkg.rst').exists() + + content = (outdir / 'testpkg.rst').read_text(encoding='utf8') + assert content == ("testpkg namespace\n" + "=================\n" + "\n" + ".. py:module:: testpkg\n" + "\n" + "Submodules\n" + "----------\n" + "\n" + "testpkg.example module\n" + "----------------------\n" + "\n" + ".. automodule:: testpkg.example\n" + " :members:\n" + " :undoc-members:\n" + " :show-inheritance:\n") + + +def test_no_duplicates(rootdir, tmp_path): + """Make sure that a ".pyx" and ".so" don't cause duplicate listings. + + We can't use pytest.mark.apidoc here as we use a different set of arguments + to apidoc_main + """ + + original_suffixes = sphinx.ext.apidoc.PY_SUFFIXES + try: + # Ensure test works on Windows + sphinx.ext.apidoc.PY_SUFFIXES += ('.so',) + + package = rootdir / 'test-apidoc-duplicates' / 'fish_licence' + outdir = tmp_path / 'out' + apidoc_main(['-o', str(outdir), "-T", str(package), "--implicit-namespaces"]) + + # Ensure the module has been documented + assert os.path.isfile(outdir / 'fish_licence.rst') + + # Ensure the submodule only appears once + text = (outdir / 'fish_licence.rst').read_text(encoding="utf-8") + count_submodules = text.count(r'fish\_licence.halibut module') + assert count_submodules == 1 + + finally: + sphinx.ext.apidoc.PY_SUFFIXES = original_suffixes diff --git a/tests/test_ext_autodoc.py b/tests/test_ext_autodoc.py new file mode 100644 index 0000000..7062763 --- /dev/null +++ b/tests/test_ext_autodoc.py @@ -0,0 +1,2537 @@ +"""Test the autodoc extension. + +This tests mainly the Documenters; the auto directives are tested in a test +source file translated by test_build. 
+""" + +import sys +from types import SimpleNamespace +from unittest.mock import Mock +from warnings import catch_warnings + +import pytest +from docutils.statemachine import ViewList + +from sphinx import addnodes +from sphinx.ext.autodoc import ALL, ModuleLevelDocumenter, Options +from sphinx.ext.autodoc.directive import DocumenterBridge, process_documenter_options +from sphinx.util.docutils import LoggingReporter + +try: + # Enable pyximport to test cython module + import pyximport + pyximport.install() +except ImportError: + pyximport = None + + +def do_autodoc(app, objtype, name, options=None): + if options is None: + options = {} + app.env.temp_data.setdefault('docname', 'index') # set dummy docname + doccls = app.registry.documenters[objtype] + docoptions = process_documenter_options(doccls, app.config, options) + state = Mock() + state.document.settings.tab_width = 8 + bridge = DocumenterBridge(app.env, LoggingReporter(''), docoptions, 1, state) + documenter = doccls(bridge, name) + documenter.generate() + + return bridge.result + + +def make_directive_bridge(env): + options = Options( + inherited_members = False, + undoc_members = False, + private_members = False, + special_members = False, + imported_members = False, + show_inheritance = False, + no_index = False, + annotation = None, + synopsis = '', + platform = '', + deprecated = False, + members = [], + member_order = 'alphabetical', + exclude_members = set(), + ignore_module_all = False, + ) + + directive = SimpleNamespace( + env = env, + genopt = options, + result = ViewList(), + record_dependencies = set(), + state = Mock(), + ) + directive.state.document.settings.tab_width = 8 + + return directive + + +processed_signatures = [] + + +def process_signature(app, what, name, obj, options, args, retann): + processed_signatures.append((what, name)) + if name == 'bar': + return '42', None + return None + + +def skip_member(app, what, name, obj, skip, options): + if name in ('__special1__', '__special2__'): + return skip + if name.startswith('__'): + return True + if name == 'skipmeth': + return True + return None + + +def test_parse_name(app): + def verify(objtype, name, result): + inst = app.registry.documenters[objtype](directive, name) + assert inst.parse_name() + assert (inst.modname, inst.objpath, inst.args, inst.retann) == result + + directive = make_directive_bridge(app.env) + + # for modules + verify('module', 'test_ext_autodoc', ('test_ext_autodoc', [], None, None)) + verify('module', 'test.test_ext_autodoc', ('test.test_ext_autodoc', [], None, None)) + verify('module', 'test(arg)', ('test', [], 'arg', None)) + assert 'signature arguments' in app._warning.getvalue() + + # for functions/classes + verify('function', 'test_ext_autodoc.raises', + ('test_ext_autodoc', ['raises'], None, None)) + verify('function', 'test_ext_autodoc.raises(exc) -> None', + ('test_ext_autodoc', ['raises'], 'exc', 'None')) + directive.env.temp_data['autodoc:module'] = 'test_ext_autodoc' + verify('function', 'raises', ('test_ext_autodoc', ['raises'], None, None)) + del directive.env.temp_data['autodoc:module'] + directive.env.ref_context['py:module'] = 'test_ext_autodoc' + verify('function', 'raises', ('test_ext_autodoc', ['raises'], None, None)) + verify('class', 'Base', ('test_ext_autodoc', ['Base'], None, None)) + + # for members + directive.env.ref_context['py:module'] = 'sphinx.testing.util' + verify('method', 'SphinxTestApp.cleanup', + ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None)) + 
directive.env.ref_context['py:module'] = 'sphinx.testing.util' + directive.env.ref_context['py:class'] = 'Foo' + directive.env.temp_data['autodoc:class'] = 'SphinxTestApp' + verify('method', 'cleanup', + ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None)) + verify('method', 'SphinxTestApp.cleanup', + ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None)) + + +def test_format_signature(app): + app.connect('autodoc-process-signature', process_signature) + app.connect('autodoc-skip-member', skip_member) + + directive = make_directive_bridge(app.env) + + def formatsig(objtype, name, obj, args, retann): + inst = app.registry.documenters[objtype](directive, name) + inst.fullname = name + inst.doc_as_attr = False # for class objtype + inst.parent = object # dummy + inst.object = obj + inst.objpath = [name] + inst.args = args + inst.retann = retann + res = inst.format_signature() + print(res) + return res + + # no signatures for modules + assert formatsig('module', 'test', None, None, None) == '' + + # test for functions + def f(a, b, c=1, **d): + pass + + def g(a='\n'): + pass + assert formatsig('function', 'f', f, None, None) == '(a, b, c=1, **d)' + assert formatsig('function', 'f', f, 'a, b, c, d', None) == '(a, b, c, d)' + assert formatsig('function', 'g', g, None, None) == r"(a='\n')" + + # test for classes + class D: + pass + + class E: + def __init__(self): + pass + + # an empty init and no init are the same + for C in (D, E): + assert formatsig('class', 'D', C, None, None) == '()' + + class SomeMeta(type): + def __call__(cls, a, b=None): + return type.__call__(cls, a, b) + + # these three are all equivalent + class F: + def __init__(self, a, b=None): + pass + + class FNew: + def __new__(cls, a, b=None): + return super().__new__(cls) + + class FMeta(metaclass=SomeMeta): + pass + + # and subclasses should always inherit + class G(F): + pass + + class GNew(FNew): + pass + + class GMeta(FMeta): + pass + + # subclasses inherit + for C in (F, FNew, FMeta, G, GNew, GMeta): + assert formatsig('class', 'C', C, None, None) == '(a, b=None)' + assert formatsig('class', 'C', D, 'a, b', 'X') == '(a, b) -> X' + + class ListSubclass(list): + pass + + # only supported if the python implementation decides to document it + if getattr(list, '__text_signature__', None) is not None: + assert formatsig('class', 'C', ListSubclass, None, None) == '(iterable=(), /)' + else: + assert formatsig('class', 'C', ListSubclass, None, None) == '' + + class ExceptionSubclass(Exception): + pass + + # Exception has no __text_signature__ at least in Python 3.11 + if getattr(Exception, '__text_signature__', None) is None: + assert formatsig('class', 'C', ExceptionSubclass, None, None) == '' + + # __init__ have signature at first line of docstring + directive.env.config.autoclass_content = 'both' + + class F2: + """some docstring for F2.""" + def __init__(self, *args, **kw): + """ + __init__(a1, a2, kw1=True, kw2=False) + + some docstring for __init__. 
+ """ + class G2(F2): + pass + + assert formatsig('class', 'F2', F2, None, None) == \ + '(a1, a2, kw1=True, kw2=False)' + assert formatsig('class', 'G2', G2, None, None) == \ + '(a1, a2, kw1=True, kw2=False)' + + # test for methods + class H: + def foo1(self, b, *c): + pass + + def foo2(b, *c): + pass + + def foo3(self, d='\n'): + pass + assert formatsig('method', 'H.foo', H.foo1, None, None) == '(b, *c)' + assert formatsig('method', 'H.foo', H.foo1, 'a', None) == '(a)' + assert formatsig('method', 'H.foo', H.foo2, None, None) == '(*c)' + assert formatsig('method', 'H.foo', H.foo3, None, None) == r"(d='\n')" + + # test bound methods interpreted as functions + assert formatsig('function', 'foo', H().foo1, None, None) == '(b, *c)' + assert formatsig('function', 'foo', H().foo2, None, None) == '(*c)' + assert formatsig('function', 'foo', H().foo3, None, None) == r"(d='\n')" + + # test exception handling (exception is caught and args is '') + directive.env.config.autodoc_docstring_signature = False + assert formatsig('function', 'int', int, None, None) == '' + + # test processing by event handler + assert formatsig('method', 'bar', H.foo1, None, None) == '42' + + # test functions created via functools.partial + from functools import partial + curried1 = partial(lambda a, b, c: None, 'A') + assert formatsig('function', 'curried1', curried1, None, None) == \ + '(b, c)' + curried2 = partial(lambda a, b, c=42: None, 'A') + assert formatsig('function', 'curried2', curried2, None, None) == \ + '(b, c=42)' + curried3 = partial(lambda a, b, *c: None, 'A') + assert formatsig('function', 'curried3', curried3, None, None) == \ + '(b, *c)' + curried4 = partial(lambda a, b, c=42, *d, **e: None, 'A') + assert formatsig('function', 'curried4', curried4, None, None) == \ + '(b, c=42, *d, **e)' + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_process_signature_typing_generic(app): + actual = do_autodoc(app, 'class', 'target.generic_class.A', {}) + + assert list(actual) == [ + '', + '.. py:class:: A(a, b=None)', + ' :module: target.generic_class', + '', + ' docstring for A', + '', + ] + + +def test_autodoc_process_signature_typehints(app): + captured = [] + + def process_signature(*args): + captured.append(args) + + app.connect('autodoc-process-signature', process_signature) + + def func(x: int, y: int) -> int: + pass + + directive = make_directive_bridge(app.env) + inst = app.registry.documenters['function'](directive, 'func') + inst.fullname = 'func' + inst.object = func + inst.objpath = ['func'] + inst.format_signature() + assert captured == [(app, 'function', 'func', func, + directive.genopt, '(x: int, y: int)', 'int')] + + +def test_get_doc(app): + directive = make_directive_bridge(app.env) + + def getdocl(objtype, obj): + inst = app.registry.documenters[objtype](directive, 'tmp') + inst.parent = object # dummy + inst.object = obj + inst.objpath = [obj.__name__] + inst.doc_as_attr = False + inst.format_signature() # handle docstring signatures! + ds = inst.get_doc() + # for testing purposes, concat them and strip the empty line at the end + res = sum(ds, [])[:-1] + print(res) + return res + + # objects without docstring + def f(): + pass + assert getdocl('function', f) == [] + + # standard function, diverse docstring styles... + def f(): + """Docstring""" + def g(): + """ + Docstring + """ + for func in (f, g): + assert getdocl('function', func) == ['Docstring'] + + # first line vs. 
other lines indentation + def f(): + """First line + + Other + lines + """ + assert getdocl('function', f) == ['First line', '', 'Other', ' lines'] + + # charset guessing (this module is encoded in utf-8) + def f(): + """Döcstring""" + assert getdocl('function', f) == ['Döcstring'] + + # verify that method docstrings get extracted in both normal case + # and in case of bound method posing as a function + class J: + def foo(self): + """Method docstring""" + assert getdocl('method', J.foo) == ['Method docstring'] + assert getdocl('function', J().foo) == ['Method docstring'] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_new_documenter(app): + class MyDocumenter(ModuleLevelDocumenter): + objtype = 'integer' + directivetype = 'integer' + priority = 100 + + @classmethod + def can_document_member(cls, member, membername, isattr, parent): + return isinstance(member, int) + + def document_members(self, all_members=False): + return + + app.add_autodocumenter(MyDocumenter) + + options = {"members": 'integer'} + actual = do_autodoc(app, 'module', 'target', options) + assert list(actual) == [ + '', + '.. py:module:: target', + '', + '', + '.. py:integer:: integer', + ' :module: target', + '', + ' documentation for the integer', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_attrgetter_using(app): + directive = make_directive_bridge(app.env) + directive.genopt['members'] = ALL + + directive.genopt['inherited_members'] = False + with catch_warnings(record=True): + _assert_getter_works(app, directive, 'class', 'target.Class', ['meth']) + + directive.genopt['inherited_members'] = True + with catch_warnings(record=True): + _assert_getter_works(app, directive, 'class', 'target.inheritance.Derived', ['inheritedmeth']) + + +def _assert_getter_works(app, directive, objtype, name, attrs=(), **kw): + getattr_spy = [] + + def _special_getattr(obj, attr_name, *defargs): + if attr_name in attrs: + getattr_spy.append((obj, attr_name)) + return None + return getattr(obj, attr_name, *defargs) + + app.add_autodoc_attrgetter(type, _special_getattr) + + getattr_spy.clear() + app.registry.documenters[objtype](directive, name).generate(**kw) + + hooked_members = {s[1] for s in getattr_spy} + documented_members = {s[1] for s in processed_signatures} + for attr in attrs: + fullname = '.'.join((name, attr)) + assert attr in hooked_members + assert fullname not in documented_members, f'{fullname!r} not intercepted' + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_py_module(app, warning): + # without py:module + actual = do_autodoc(app, 'method', 'Class.meth') + assert list(actual) == [] + assert ("don't know which module to import for autodocumenting 'Class.meth'" + in warning.getvalue()) + + # with py:module + app.env.ref_context['py:module'] = 'target' + warning.truncate(0) + + actual = do_autodoc(app, 'method', 'Class.meth') + assert list(actual) == [ + '', + '.. py:method:: Class.meth()', + ' :module: target', + '', + ' Function.', + '', + ] + assert ("don't know which module to import for autodocumenting 'Class.meth'" + not in warning.getvalue()) + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_decorator(app): + actual = do_autodoc(app, 'decorator', 'target.decorator.deco1') + assert list(actual) == [ + '', + '.. py:decorator:: deco1', + ' :module: target.decorator', + '', + ' docstring for deco1', + '', + ] + + actual = do_autodoc(app, 'decorator', 'target.decorator.deco2') + assert list(actual) == [ + '', + '.. 
py:decorator:: deco2(condition, message)', + ' :module: target.decorator', + '', + ' docstring for deco2', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_exception(app): + actual = do_autodoc(app, 'exception', 'target.CustomEx') + assert list(actual) == [ + '', + '.. py:exception:: CustomEx', + ' :module: target', + '', + ' My custom exception.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_warnings(app, warning): + app.env.temp_data['docname'] = 'dummy' + + # can't import module + do_autodoc(app, 'module', 'unknown') + assert "failed to import module 'unknown'" in warning.getvalue() + + # missing function + do_autodoc(app, 'function', 'unknown') + assert "import for autodocumenting 'unknown'" in warning.getvalue() + + do_autodoc(app, 'function', 'target.unknown') + assert "failed to import function 'unknown' from module 'target'" in warning.getvalue() + + # missing method + do_autodoc(app, 'method', 'target.Class.unknown') + assert "failed to import method 'Class.unknown' from module 'target'" in warning.getvalue() + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_attributes(app): + options = {"synopsis": 'Synopsis', + "platform": "Platform", + "deprecated": None} + actual = do_autodoc(app, 'module', 'target', options) + assert list(actual) == [ + '', + '.. py:module:: target', + ' :synopsis: Synopsis', + ' :platform: Platform', + ' :deprecated:', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_members(app): + # default (no-members) + actual = do_autodoc(app, 'class', 'target.inheritance.Base') + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Base()', + ] + + # default ALL-members + options = {"members": None} + actual = do_autodoc(app, 'class', 'target.inheritance.Base', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Base()', + ' .. py:attribute:: Base.inheritedattr', + ' .. py:method:: Base.inheritedclassmeth()', + ' .. py:method:: Base.inheritedmeth()', + ' .. py:method:: Base.inheritedstaticmeth(cls)', + ] + + # default specific-members + options = {"members": "inheritedmeth,inheritedstaticmeth"} + actual = do_autodoc(app, 'class', 'target.inheritance.Base', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Base()', + ' .. py:method:: Base.inheritedmeth()', + ' .. py:method:: Base.inheritedstaticmeth(cls)', + ] + + # ALL-members override autodoc_default_options + options = {"members": None} + app.config.autodoc_default_options["members"] = "inheritedstaticmeth" + actual = do_autodoc(app, 'class', 'target.inheritance.Base', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Base()', + ' .. py:attribute:: Base.inheritedattr', + ' .. py:method:: Base.inheritedclassmeth()', + ' .. py:method:: Base.inheritedmeth()', + ' .. py:method:: Base.inheritedstaticmeth(cls)', + ] + + # members override autodoc_default_options + options = {"members": "inheritedmeth"} + app.config.autodoc_default_options["members"] = "inheritedstaticmeth" + actual = do_autodoc(app, 'class', 'target.inheritance.Base', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Base()', + ' .. 
py:method:: Base.inheritedmeth()', + ] + + # members extends autodoc_default_options + options = {"members": "+inheritedmeth"} + app.config.autodoc_default_options["members"] = "inheritedstaticmeth" + actual = do_autodoc(app, 'class', 'target.inheritance.Base', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Base()', + ' .. py:method:: Base.inheritedmeth()', + ' .. py:method:: Base.inheritedstaticmeth(cls)', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_exclude_members(app): + options = {"members": None, + "exclude-members": "inheritedmeth,inheritedstaticmeth"} + actual = do_autodoc(app, 'class', 'target.inheritance.Base', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Base()', + ' .. py:attribute:: Base.inheritedattr', + ' .. py:method:: Base.inheritedclassmeth()', + ] + + # members vs exclude-members + options = {"members": "inheritedmeth", + "exclude-members": "inheritedmeth"} + actual = do_autodoc(app, 'class', 'target.inheritance.Base', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Base()', + ] + + # + has no effect when autodoc_default_options are not present + options = {"members": None, + "exclude-members": "+inheritedmeth,inheritedstaticmeth"} + actual = do_autodoc(app, 'class', 'target.inheritance.Base', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Base()', + ' .. py:attribute:: Base.inheritedattr', + ' .. py:method:: Base.inheritedclassmeth()', + ] + + # exclude-members overrides autodoc_default_options + options = {"members": None, + "exclude-members": "inheritedmeth"} + app.config.autodoc_default_options["exclude-members"] = "inheritedstaticmeth" + actual = do_autodoc(app, 'class', 'target.inheritance.Base', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Base()', + ' .. py:attribute:: Base.inheritedattr', + ' .. py:method:: Base.inheritedclassmeth()', + ' .. py:method:: Base.inheritedstaticmeth(cls)', + ] + + # exclude-members extends autodoc_default_options + options = {"members": None, + "exclude-members": "+inheritedmeth"} + app.config.autodoc_default_options["exclude-members"] = "inheritedstaticmeth" + actual = do_autodoc(app, 'class', 'target.inheritance.Base', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Base()', + ' .. py:attribute:: Base.inheritedattr', + ' .. py:method:: Base.inheritedclassmeth()', + ] + + # no exclude-members causes use autodoc_default_options + options = {"members": None} + app.config.autodoc_default_options["exclude-members"] = "inheritedstaticmeth,inheritedmeth" + actual = do_autodoc(app, 'class', 'target.inheritance.Base', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Base()', + ' .. py:attribute:: Base.inheritedattr', + ' .. py:method:: Base.inheritedclassmeth()', + ] + + # empty exclude-members cancels autodoc_default_options + options = {"members": None, + "exclude-members": None} + app.config.autodoc_default_options["exclude-members"] = "inheritedstaticmeth,inheritedmeth" + actual = do_autodoc(app, 'class', 'target.inheritance.Base', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Base()', + ' .. py:attribute:: Base.inheritedattr', + ' .. py:method:: Base.inheritedclassmeth()', + ' .. py:method:: Base.inheritedmeth()', + ' .. 
py:method:: Base.inheritedstaticmeth(cls)', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_undoc_members(app): + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'class', 'target.Class', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ' .. py:attribute:: Class.attr', + ' .. py:attribute:: Class.docattr', + ' .. py:method:: Class.excludemeth()', + ' .. py:attribute:: Class.inst_attr_comment', + ' .. py:attribute:: Class.inst_attr_inline', + ' .. py:attribute:: Class.inst_attr_string', + ' .. py:attribute:: Class.mdocattr', + ' .. py:method:: Class.meth()', + ' .. py:method:: Class.moore(a, e, f) -> happiness', + ' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)', + ' .. py:attribute:: Class.skipattr', + ' .. py:method:: Class.skipmeth()', + ' .. py:attribute:: Class.udocattr', + ' .. py:method:: Class.undocmeth()', + ] + + # use autodoc_default_options + options = {"members": None} + app.config.autodoc_default_options["undoc-members"] = None + actual = do_autodoc(app, 'class', 'target.Class', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ' .. py:attribute:: Class.attr', + ' .. py:attribute:: Class.docattr', + ' .. py:method:: Class.excludemeth()', + ' .. py:attribute:: Class.inst_attr_comment', + ' .. py:attribute:: Class.inst_attr_inline', + ' .. py:attribute:: Class.inst_attr_string', + ' .. py:attribute:: Class.mdocattr', + ' .. py:method:: Class.meth()', + ' .. py:method:: Class.moore(a, e, f) -> happiness', + ' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)', + ' .. py:attribute:: Class.skipattr', + ' .. py:method:: Class.skipmeth()', + ' .. py:attribute:: Class.udocattr', + ' .. py:method:: Class.undocmeth()', + ] + + # options negation work check + options = {"members": None, + "no-undoc-members": None} + app.config.autodoc_default_options["undoc-members"] = None + actual = do_autodoc(app, 'class', 'target.Class', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ' .. py:attribute:: Class.attr', + ' .. py:attribute:: Class.docattr', + ' .. py:method:: Class.excludemeth()', + ' .. py:attribute:: Class.inst_attr_comment', + ' .. py:attribute:: Class.inst_attr_inline', + ' .. py:attribute:: Class.inst_attr_string', + ' .. py:attribute:: Class.mdocattr', + ' .. py:method:: Class.meth()', + ' .. py:method:: Class.moore(a, e, f) -> happiness', + ' .. py:method:: Class.skipmeth()', + ' .. py:attribute:: Class.udocattr', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_undoc_members_for_metadata_only(app): + # metadata only member is not displayed + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.metadata', options) + assert list(actual) == [ + '', + '.. py:module:: target.metadata', + '', + ] + + # metadata only member is displayed when undoc-member given + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.metadata', options) + assert list(actual) == [ + '', + '.. py:module:: target.metadata', + '', + '', + '.. 
py:function:: foo()', + ' :module: target.metadata', + '', + ' :meta metadata-only-docstring:', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_inherited_members(app): + options = {"members": None, + "inherited-members": None} + actual = do_autodoc(app, 'class', 'target.inheritance.Derived', options) + assert list(filter(lambda l: 'method::' in l, actual)) == [ + ' .. py:method:: Derived.inheritedclassmeth()', + ' .. py:method:: Derived.inheritedmeth()', + ' .. py:method:: Derived.inheritedstaticmeth(cls)', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_inherited_members_Base(app): + options = {"members": None, + "inherited-members": "Base", + "special-members": None} + + # check methods for object class are shown + actual = do_autodoc(app, 'class', 'target.inheritance.Derived', options) + assert ' .. py:method:: Derived.inheritedmeth()' in actual + assert ' .. py:method:: Derived.inheritedclassmeth' not in actual + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_inherited_members_None(app): + options = {"members": None, + "inherited-members": "None", + "special-members": None} + + # check methods for object class are shown + actual = do_autodoc(app, 'class', 'target.inheritance.Derived', options) + assert ' .. py:method:: Derived.__init__()' in actual + assert ' .. py:method:: Derived.__str__()' in actual + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_imported_members(app): + options = {"members": None, + "imported-members": None, + "ignore-module-all": None} + actual = do_autodoc(app, 'module', 'target', options) + assert '.. py:function:: function_to_be_imported(app: ~sphinx.application.Sphinx | None) -> str' in actual + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_special_members(app): + # specific special methods + options = {"undoc-members": None, + "special-members": "__init__,__special1__"} + actual = do_autodoc(app, 'class', 'target.Class', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ' .. py:method:: Class.__init__(arg)', + ' .. py:method:: Class.__special1__()', + ] + + # combination with specific members + options = {"members": "attr,docattr", + "undoc-members": None, + "special-members": "__init__,__special1__"} + actual = do_autodoc(app, 'class', 'target.Class', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ' .. py:method:: Class.__init__(arg)', + ' .. py:method:: Class.__special1__()', + ' .. py:attribute:: Class.attr', + ' .. py:attribute:: Class.docattr', + ] + + # all special methods + options = {"members": None, + "undoc-members": None, + "special-members": None} + actual = do_autodoc(app, 'class', 'target.Class', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ' .. py:attribute:: Class.__annotations__', + ' .. py:attribute:: Class.__dict__', + ' .. py:method:: Class.__init__(arg)', + ' .. py:attribute:: Class.__module__', + ' .. py:method:: Class.__special1__()', + ' .. py:method:: Class.__special2__()', + ' .. py:attribute:: Class.__weakref__', + ' .. py:attribute:: Class.attr', + ' .. py:attribute:: Class.docattr', + ' .. py:method:: Class.excludemeth()', + ' .. py:attribute:: Class.inst_attr_comment', + ' .. py:attribute:: Class.inst_attr_inline', + ' .. py:attribute:: Class.inst_attr_string', + ' .. py:attribute:: Class.mdocattr', + ' .. 
py:method:: Class.meth()', + ' .. py:method:: Class.moore(a, e, f) -> happiness', + ' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)', + ' .. py:attribute:: Class.skipattr', + ' .. py:method:: Class.skipmeth()', + ' .. py:attribute:: Class.udocattr', + ' .. py:method:: Class.undocmeth()', + ] + + # specific special methods from autodoc_default_options + options = {"undoc-members": None} + app.config.autodoc_default_options["special-members"] = "__special2__" + actual = do_autodoc(app, 'class', 'target.Class', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ' .. py:method:: Class.__special2__()', + ] + + # specific special methods option with autodoc_default_options + options = {"undoc-members": None, + "special-members": "__init__,__special1__"} + app.config.autodoc_default_options["special-members"] = "__special2__" + actual = do_autodoc(app, 'class', 'target.Class', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ' .. py:method:: Class.__init__(arg)', + ' .. py:method:: Class.__special1__()', + ] + + # specific special methods merge with autodoc_default_options + options = {"undoc-members": None, + "special-members": "+__init__,__special1__"} + app.config.autodoc_default_options["special-members"] = "__special2__" + actual = do_autodoc(app, 'class', 'target.Class', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ' .. py:method:: Class.__init__(arg)', + ' .. py:method:: Class.__special1__()', + ' .. py:method:: Class.__special2__()', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_ignore_module_all(app): + # default (no-ignore-module-all) + options = {"members": None} + actual = do_autodoc(app, 'module', 'target', options) + assert list(filter(lambda l: 'class::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ] + + # ignore-module-all + options = {"members": None, + "ignore-module-all": None} + actual = do_autodoc(app, 'module', 'target', options) + assert list(filter(lambda l: 'class::' in l, actual)) == [ + '.. py:class:: Class(arg)', + '.. py:class:: CustomDict', + '.. py:class:: InnerChild()', + '.. py:class:: InstAttCls()', + '.. py:class:: Outer()', + ' .. py:class:: Outer.Inner()', + '.. py:class:: StrRepr', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_noindex(app): + options = {"no-index": None} + actual = do_autodoc(app, 'module', 'target', options) + assert list(actual) == [ + '', + '.. py:module:: target', + ' :no-index:', + '', + ] + + # TODO: :no-index: should be propagated to children of target item. + + actual = do_autodoc(app, 'class', 'target.inheritance.Base', options) + assert list(actual) == [ + '', + '.. py:class:: Base()', + ' :no-index:', + ' :module: target.inheritance', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_subclass_of_builtin_class(app): + options = {"members": None} + actual = do_autodoc(app, 'class', 'target.CustomDict', options) + assert list(actual) == [ + '', + '.. py:class:: CustomDict', + ' :module: target', + '', + ' Docstring.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_inner_class(app): + options = {"members": None} + actual = do_autodoc(app, 'class', 'target.Outer', options) + assert list(actual) == [ + '', + '.. py:class:: Outer()', + ' :module: target', + '', + ' Foo', + '', + '', + ' .. 
py:class:: Outer.Inner()', + ' :module: target', + '', + ' Foo', + '', + '', + ' .. py:method:: Outer.Inner.meth()', + ' :module: target', + '', + ' Foo', + '', + '', + ' .. py:attribute:: Outer.factory', + ' :module: target', + '', + ' alias of :py:class:`dict`', + ] + + actual = do_autodoc(app, 'class', 'target.Outer.Inner', options) + assert list(actual) == [ + '', + '.. py:class:: Inner()', + ' :module: target.Outer', + '', + ' Foo', + '', + '', + ' .. py:method:: Inner.meth()', + ' :module: target.Outer', + '', + ' Foo', + '', + ] + + options['show-inheritance'] = None + actual = do_autodoc(app, 'class', 'target.InnerChild', options) + assert list(actual) == [ + '', + '.. py:class:: InnerChild()', + ' :module: target', '', + ' Bases: :py:class:`~target.Outer.Inner`', + '', + ' InnerChild docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_classmethod(app): + actual = do_autodoc(app, 'method', 'target.inheritance.Base.inheritedclassmeth') + assert list(actual) == [ + '', + '.. py:method:: Base.inheritedclassmeth()', + ' :module: target.inheritance', + ' :classmethod:', + '', + ' Inherited class method.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_staticmethod(app): + actual = do_autodoc(app, 'method', 'target.inheritance.Base.inheritedstaticmeth') + assert list(actual) == [ + '', + '.. py:method:: Base.inheritedstaticmeth(cls)', + ' :module: target.inheritance', + ' :staticmethod:', + '', + ' Inherited static method.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_descriptor(app): + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'class', 'target.descriptor.Class', options) + assert list(actual) == [ + '', + '.. py:class:: Class()', + ' :module: target.descriptor', + '', + '', + ' .. py:attribute:: Class.descr', + ' :module: target.descriptor', + '', + ' Descriptor instance docstring.', + '', + '', + ' .. py:property:: Class.prop', + ' :module: target.descriptor', + '', + ' Property.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_cached_property(app): + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'class', 'target.cached_property.Foo', options) + assert list(actual) == [ + '', + '.. py:class:: Foo()', + ' :module: target.cached_property', + '', + '', + ' .. py:property:: Foo.prop', + ' :module: target.cached_property', + ' :type: int', + '', + '', + ' .. py:property:: Foo.prop_with_type_comment', + ' :module: target.cached_property', + ' :type: int', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_member_order(app): + # case member-order='bysource' + options = {"members": None, + 'member-order': 'bysource', + "undoc-members": None, + 'private-members': None} + actual = do_autodoc(app, 'class', 'target.Class', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ' .. py:method:: Class.meth()', + ' .. py:method:: Class.undocmeth()', + ' .. py:method:: Class.skipmeth()', + ' .. py:method:: Class.excludemeth()', + ' .. py:attribute:: Class.skipattr', + ' .. py:attribute:: Class.attr', + ' .. py:attribute:: Class.docattr', + ' .. py:attribute:: Class.udocattr', + ' .. py:attribute:: Class.mdocattr', + ' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)', + ' .. py:method:: Class.moore(a, e, f) -> happiness', + ' .. py:attribute:: Class.inst_attr_inline', + ' .. 
py:attribute:: Class.inst_attr_comment', + ' .. py:attribute:: Class.inst_attr_string', + ' .. py:attribute:: Class._private_inst_attr', + ] + + # case member-order='groupwise' + options = {"members": None, + 'member-order': 'groupwise', + "undoc-members": None, + 'private-members': None} + actual = do_autodoc(app, 'class', 'target.Class', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ' .. py:method:: Class.excludemeth()', + ' .. py:method:: Class.meth()', + ' .. py:method:: Class.moore(a, e, f) -> happiness', + ' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)', + ' .. py:method:: Class.skipmeth()', + ' .. py:method:: Class.undocmeth()', + ' .. py:attribute:: Class._private_inst_attr', + ' .. py:attribute:: Class.attr', + ' .. py:attribute:: Class.docattr', + ' .. py:attribute:: Class.inst_attr_comment', + ' .. py:attribute:: Class.inst_attr_inline', + ' .. py:attribute:: Class.inst_attr_string', + ' .. py:attribute:: Class.mdocattr', + ' .. py:attribute:: Class.skipattr', + ' .. py:attribute:: Class.udocattr', + ] + + # case member-order=None + options = {"members": None, + "undoc-members": None, + 'private-members': None} + actual = do_autodoc(app, 'class', 'target.Class', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ' .. py:attribute:: Class._private_inst_attr', + ' .. py:attribute:: Class.attr', + ' .. py:attribute:: Class.docattr', + ' .. py:method:: Class.excludemeth()', + ' .. py:attribute:: Class.inst_attr_comment', + ' .. py:attribute:: Class.inst_attr_inline', + ' .. py:attribute:: Class.inst_attr_string', + ' .. py:attribute:: Class.mdocattr', + ' .. py:method:: Class.meth()', + ' .. py:method:: Class.moore(a, e, f) -> happiness', + ' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)', + ' .. py:attribute:: Class.skipattr', + ' .. py:method:: Class.skipmeth()', + ' .. py:attribute:: Class.udocattr', + ' .. py:method:: Class.undocmeth()', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_module_member_order(app): + # case member-order='bysource' + options = {"members": 'foo, Bar, baz, qux, Quux, foobar', + 'member-order': 'bysource', + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.sort_by_all', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:module:: target.sort_by_all', + '.. py:function:: baz()', + '.. py:function:: foo()', + '.. py:class:: Bar()', + '.. py:class:: Quux()', + '.. py:function:: foobar()', + '.. py:function:: qux()', + ] + + # case member-order='bysource' and ignore-module-all + options = {"members": 'foo, Bar, baz, qux, Quux, foobar', + 'member-order': 'bysource', + "undoc-members": None, + "ignore-module-all": None} + actual = do_autodoc(app, 'module', 'target.sort_by_all', options) + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:module:: target.sort_by_all', + '.. py:function:: foo()', + '.. py:class:: Bar()', + '.. py:function:: baz()', + '.. py:function:: qux()', + '.. py:class:: Quux()', + '.. py:function:: foobar()', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_module_scope(app): + app.env.temp_data['autodoc:module'] = 'target' + actual = do_autodoc(app, 'attribute', 'Class.mdocattr') + assert list(actual) == [ + '', + '.. 
py:attribute:: Class.mdocattr', + ' :module: target', + ' :value: <_io.StringIO object>', + '', + ' should be documented as well - süß', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_class_scope(app): + app.env.temp_data['autodoc:module'] = 'target' + app.env.temp_data['autodoc:class'] = 'Class' + actual = do_autodoc(app, 'attribute', 'mdocattr') + assert list(actual) == [ + '', + '.. py:attribute:: Class.mdocattr', + ' :module: target', + ' :value: <_io.StringIO object>', + '', + ' should be documented as well - süß', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_class_attributes(app): + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'class', 'target.AttCls', options) + assert list(actual) == [ + '', + '.. py:class:: AttCls()', + ' :module: target', + '', + '', + ' .. py:attribute:: AttCls.a1', + ' :module: target', + ' :value: hello world', + '', + '', + ' .. py:attribute:: AttCls.a2', + ' :module: target', + ' :value: None', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoclass_instance_attributes(app): + options = {"members": None} + actual = do_autodoc(app, 'class', 'target.InstAttCls', options) + assert list(actual) == [ + '', + '.. py:class:: InstAttCls()', + ' :module: target', + '', + ' Class with documented class and instance attributes.', + '', + '', + ' .. py:attribute:: InstAttCls.ca1', + ' :module: target', + " :value: 'a'", + '', + ' Doc comment for class attribute InstAttCls.ca1.', + ' It can have multiple lines.', + '', + '', + ' .. py:attribute:: InstAttCls.ca2', + ' :module: target', + " :value: 'b'", + '', + ' Doc comment for InstAttCls.ca2. One line only.', + '', + '', + ' .. py:attribute:: InstAttCls.ca3', + ' :module: target', + " :value: 'c'", + '', + ' Docstring for class attribute InstAttCls.ca3.', + '', + '', + ' .. py:attribute:: InstAttCls.ia1', + ' :module: target', + '', + ' Doc comment for instance attribute InstAttCls.ia1', + '', + '', + ' .. py:attribute:: InstAttCls.ia2', + ' :module: target', + '', + ' Docstring for instance attribute InstAttCls.ia2.', + '', + ] + + # pick up arbitrary attributes + options = {"members": 'ca1,ia1'} + actual = do_autodoc(app, 'class', 'target.InstAttCls', options) + assert list(actual) == [ + '', + '.. py:class:: InstAttCls()', + ' :module: target', + '', + ' Class with documented class and instance attributes.', + '', + '', + ' .. py:attribute:: InstAttCls.ca1', + ' :module: target', + " :value: 'a'", + '', + ' Doc comment for class attribute InstAttCls.ca1.', + ' It can have multiple lines.', + '', + '', + ' .. py:attribute:: InstAttCls.ia1', + ' :module: target', + '', + ' Doc comment for instance attribute InstAttCls.ia1', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_instance_attributes(app): + actual = do_autodoc(app, 'attribute', 'target.InstAttCls.ia1') + assert list(actual) == [ + '', + '.. py:attribute:: InstAttCls.ia1', + ' :module: target', + '', + ' Doc comment for instance attribute InstAttCls.ia1', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_slots(app): + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.slots', options) + assert list(actual) == [ + '', + '.. py:module:: target.slots', + '', + '', + '.. py:class:: Bar()', + ' :module: target.slots', + '', + ' docstring', + '', + '', + ' .. 
py:attribute:: Bar.attr1', + ' :module: target.slots', + ' :type: int', + '', + ' docstring of attr1', + '', + '', + ' .. py:attribute:: Bar.attr2', + ' :module: target.slots', + '', + ' docstring of instance attr2', + '', + '', + ' .. py:attribute:: Bar.attr3', + ' :module: target.slots', + '', + '', + '.. py:class:: Baz()', + ' :module: target.slots', + '', + ' docstring', + '', + '', + ' .. py:attribute:: Baz.attr', + ' :module: target.slots', + '', + '', + '.. py:class:: Foo()', + ' :module: target.slots', + '', + ' docstring', + '', + '', + ' .. py:attribute:: Foo.attr', + ' :module: target.slots', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_enum_class(app): + options = {"members": None} + actual = do_autodoc(app, 'class', 'target.enums.EnumCls', options) + + if sys.version_info[:2] >= (3, 12): + args = ('(value, names=None, *values, module=None, ' + 'qualname=None, type=None, start=1, boundary=None)') + elif sys.version_info[:2] >= (3, 11): + args = ('(value, names=None, *, module=None, qualname=None, ' + 'type=None, start=1, boundary=None)') + else: + args = '(value)' + + assert list(actual) == [ + '', + '.. py:class:: EnumCls' + args, + ' :module: target.enums', + '', + ' this is enum class', + '', + '', + ' .. py:method:: EnumCls.say_goodbye()', + ' :module: target.enums', + ' :classmethod:', + '', + ' a classmethod says good-bye to you.', + '', + '', + ' .. py:method:: EnumCls.say_hello()', + ' :module: target.enums', + '', + ' a method says hello to you.', + '', + '', + ' .. py:attribute:: EnumCls.val1', + ' :module: target.enums', + ' :value: 12', + '', + ' doc for val1', + '', + '', + ' .. py:attribute:: EnumCls.val2', + ' :module: target.enums', + ' :value: 23', + '', + ' doc for val2', + '', + '', + ' .. py:attribute:: EnumCls.val3', + ' :module: target.enums', + ' :value: 34', + '', + ' doc for val3', + '', + ] + + # checks for an attribute of EnumClass + actual = do_autodoc(app, 'attribute', 'target.enums.EnumCls.val1') + assert list(actual) == [ + '', + '.. py:attribute:: EnumCls.val1', + ' :module: target.enums', + ' :value: 12', + '', + ' doc for val1', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_descriptor_class(app): + options = {"members": 'CustomDataDescriptor,CustomDataDescriptor2'} + actual = do_autodoc(app, 'module', 'target.descriptor', options) + assert list(actual) == [ + '', + '.. py:module:: target.descriptor', + '', + '', + '.. py:class:: CustomDataDescriptor(doc)', + ' :module: target.descriptor', + '', + ' Descriptor class docstring.', + '', + '', + ' .. py:method:: CustomDataDescriptor.meth()', + ' :module: target.descriptor', + '', + ' Function.', + '', + '', + '.. py:class:: CustomDataDescriptor2(doc)', + ' :module: target.descriptor', + '', + ' Descriptor class with custom metaclass docstring.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_automethod_for_builtin(app): + actual = do_autodoc(app, 'method', 'builtins.int.__add__') + assert list(actual) == [ + '', + '.. py:method:: int.__add__(value, /)', + ' :module: builtins', + '', + ' Return self+value.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_automethod_for_decorated(app): + actual = do_autodoc(app, 'method', 'target.decorator.Bar.meth') + assert list(actual) == [ + '', + '.. 
py:method:: Bar.meth(name=None, age=None)', + ' :module: target.decorator', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_abstractmethods(app): + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.abstractmethods', options) + assert list(actual) == [ + '', + '.. py:module:: target.abstractmethods', + '', + '', + '.. py:class:: Base()', + ' :module: target.abstractmethods', + '', + '', + ' .. py:method:: Base.abstractmeth()', + ' :module: target.abstractmethods', + ' :abstractmethod:', + '', + '', + ' .. py:method:: Base.classmeth()', + ' :module: target.abstractmethods', + ' :abstractmethod:', + ' :classmethod:', + '', + '', + ' .. py:method:: Base.coroutinemeth()', + ' :module: target.abstractmethods', + ' :abstractmethod:', + ' :async:', + '', + '', + ' .. py:method:: Base.meth()', + ' :module: target.abstractmethods', + '', + '', + ' .. py:property:: Base.prop', + ' :module: target.abstractmethods', + ' :abstractmethod:', + '', + '', + ' .. py:method:: Base.staticmeth()', + ' :module: target.abstractmethods', + ' :abstractmethod:', + ' :staticmethod:', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_partialfunction(app): + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.partialfunction', options) + assert list(actual) == [ + '', + '.. py:module:: target.partialfunction', + '', + '', + '.. py:function:: func1(a, b, c)', + ' :module: target.partialfunction', + '', + ' docstring of func1', + '', + '', + '.. py:function:: func2(b, c)', + ' :module: target.partialfunction', + '', + ' docstring of func1', + '', + '', + '.. py:function:: func3(c)', + ' :module: target.partialfunction', + '', + ' docstring of func3', + '', + '', + '.. py:function:: func4()', + ' :module: target.partialfunction', + '', + ' docstring of func3', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_imported_partialfunction_should_not_shown_without_imported_members(app): + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.imported_members', options) + assert list(actual) == [ + '', + '.. py:module:: target.imported_members', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_bound_method(app): + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.bound_method', options) + assert list(actual) == [ + '', + '.. py:module:: target.bound_method', + '', + '', + '.. py:function:: bound_method()', + ' :module: target.bound_method', + '', + ' Method docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_partialmethod(app): + expected = [ + '', + '.. py:class:: Cell()', + ' :module: target.partialmethod', + '', + ' An example for partialmethod.', + '', + ' refs: https://docs.python.jp/3/library/functools.html#functools.partialmethod', + '', + '', + ' .. py:method:: Cell.set_alive()', + ' :module: target.partialmethod', + '', + ' Make a cell alive.', + '', + '', + ' .. py:method:: Cell.set_state(state)', + ' :module: target.partialmethod', + '', + ' Update state of cell to *state*.', + '', + ] + + options = {"members": None} + actual = do_autodoc(app, 'class', 'target.partialmethod.Cell', options) + assert list(actual) == expected + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_partialmethod_undoc_members(app): + expected = [ + '', + '.. 
py:class:: Cell()', + ' :module: target.partialmethod', + '', + ' An example for partialmethod.', + '', + ' refs: https://docs.python.jp/3/library/functools.html#functools.partialmethod', + '', + '', + ' .. py:method:: Cell.set_alive()', + ' :module: target.partialmethod', + '', + ' Make a cell alive.', + '', + '', + ' .. py:method:: Cell.set_dead()', + ' :module: target.partialmethod', + '', + '', + ' .. py:method:: Cell.set_state(state)', + ' :module: target.partialmethod', + '', + ' Update state of cell to *state*.', + '', + ] + + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'class', 'target.partialmethod.Cell', options) + assert list(actual) == expected + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_typed_instance_variables(app): + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.typed_vars', options) + assert list(actual) == [ + '', + '.. py:module:: target.typed_vars', + '', + '', + '.. py:attribute:: Alias', + ' :module: target.typed_vars', + '', + ' alias of :py:class:`~target.typed_vars.Derived`', + '', + '.. py:class:: Class()', + ' :module: target.typed_vars', + '', + '', + ' .. py:attribute:: Class.attr1', + ' :module: target.typed_vars', + ' :type: int', + ' :value: 0', + '', + '', + ' .. py:attribute:: Class.attr2', + ' :module: target.typed_vars', + ' :type: int', + '', + '', + ' .. py:attribute:: Class.attr3', + ' :module: target.typed_vars', + ' :type: int', + ' :value: 0', + '', + '', + ' .. py:attribute:: Class.attr4', + ' :module: target.typed_vars', + ' :type: int', + '', + ' attr4', + '', + '', + ' .. py:attribute:: Class.attr5', + ' :module: target.typed_vars', + ' :type: int', + '', + ' attr5', + '', + '', + ' .. py:attribute:: Class.attr6', + ' :module: target.typed_vars', + ' :type: int', + '', + ' attr6', + '', + '', + ' .. py:attribute:: Class.descr4', + ' :module: target.typed_vars', + ' :type: int', + '', + ' This is descr4', + '', + '', + '.. py:class:: Derived()', + ' :module: target.typed_vars', + '', + '', + ' .. py:attribute:: Derived.attr7', + ' :module: target.typed_vars', + ' :type: int', + '', + '', + '.. py:data:: attr1', + ' :module: target.typed_vars', + ' :type: str', + " :value: ''", + '', + ' attr1', + '', + '', + '.. py:data:: attr2', + ' :module: target.typed_vars', + ' :type: str', + '', + ' attr2', + '', + '', + '.. py:data:: attr3', + ' :module: target.typed_vars', + ' :type: str', + " :value: ''", + '', + ' attr3', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_typed_inherited_instance_variables(app): + options = {"members": None, + "undoc-members": None, + "inherited-members": None} + actual = do_autodoc(app, 'class', 'target.typed_vars.Derived', options) + assert list(actual) == [ + '', + '.. py:class:: Derived()', + ' :module: target.typed_vars', + '', + '', + ' .. py:attribute:: Derived.attr1', + ' :module: target.typed_vars', + ' :type: int', + ' :value: 0', + '', + '', + ' .. py:attribute:: Derived.attr2', + ' :module: target.typed_vars', + ' :type: int', + '', + '', + ' .. py:attribute:: Derived.attr3', + ' :module: target.typed_vars', + ' :type: int', + ' :value: 0', + '', + '', + ' .. py:attribute:: Derived.attr4', + ' :module: target.typed_vars', + ' :type: int', + '', + ' attr4', + '', + '', + ' .. py:attribute:: Derived.attr5', + ' :module: target.typed_vars', + ' :type: int', + '', + ' attr5', + '', + '', + ' .. 
py:attribute:: Derived.attr6', + ' :module: target.typed_vars', + ' :type: int', + '', + ' attr6', + '', + '', + ' .. py:attribute:: Derived.attr7', + ' :module: target.typed_vars', + ' :type: int', + '', + '', + ' .. py:attribute:: Derived.descr4', + ' :module: target.typed_vars', + ' :type: int', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_GenericAlias(app): + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.genericalias', options) + assert list(actual) == [ + '', + '.. py:module:: target.genericalias', + '', + '', + '.. py:class:: Class()', + ' :module: target.genericalias', + '', + '', + ' .. py:attribute:: Class.T', + ' :module: target.genericalias', + '', + ' A list of int', + '', + ' alias of :py:class:`~typing.List`\\ [:py:class:`int`]', + '', + '', + '.. py:data:: L', + ' :module: target.genericalias', + '', + ' A list of Class', + '', + ' alias of :py:class:`~typing.List`\\ ' + '[:py:class:`~target.genericalias.Class`]', + '', + '', + '.. py:data:: T', + ' :module: target.genericalias', + '', + ' A list of int', + '', + ' alias of :py:class:`~typing.List`\\ [:py:class:`int`]', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_TypeVar(app): + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.typevar', options) + assert list(actual) == [ + '', + '.. py:module:: target.typevar', + '', + '', + '.. py:class:: Class()', + ' :module: target.typevar', + '', + '', + ' .. py:class:: Class.T1', + ' :module: target.typevar', + '', + ' T1', + '', + " alias of TypeVar('T1')", + '', + '', + ' .. py:class:: Class.T6', + ' :module: target.typevar', + '', + ' T6', + '', + ' alias of :py:class:`~datetime.date`', + '', + '', + '.. py:class:: T1', + ' :module: target.typevar', + '', + ' T1', + '', + " alias of TypeVar('T1')", + '', + '', + '.. py:class:: T3', + ' :module: target.typevar', + '', + ' T3', + '', + " alias of TypeVar('T3', int, str)", + '', + '', + '.. py:class:: T4', + ' :module: target.typevar', + '', + ' T4', + '', + " alias of TypeVar('T4', covariant=True)", + '', + '', + '.. py:class:: T5', + ' :module: target.typevar', + '', + ' T5', + '', + " alias of TypeVar('T5', contravariant=True)", + '', + '', + '.. py:class:: T6', + ' :module: target.typevar', + '', + ' T6', + '', + ' alias of :py:class:`~datetime.date`', + '', + '', + '.. py:class:: T7', + ' :module: target.typevar', + '', + ' T7', + '', + " alias of TypeVar('T7', bound=\\ :py:class:`int`)", + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_Annotated(app): + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.annotated', options) + assert list(actual) == [ + '', + '.. py:module:: target.annotated', + '', + '', + '.. py:function:: hello(name: str) -> None', + ' :module: target.annotated', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_TYPE_CHECKING(app): + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.TYPE_CHECKING', options) + assert list(actual) == [ + '', + '.. py:module:: target.TYPE_CHECKING', + '', + '', + '.. py:class:: Foo()', + ' :module: target.TYPE_CHECKING', + '', + '', + ' .. py:attribute:: Foo.attr1', + ' :module: target.TYPE_CHECKING', + ' :type: ~_io.StringIO', + '', + '', + '.. 
py:function:: spam(ham: ~collections.abc.Iterable[str]) -> tuple[~gettext.NullTranslations, bool]', + ' :module: target.TYPE_CHECKING', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_TYPE_CHECKING_circular_import(app): + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'circular_import', options) + assert list(actual) == [ + '', + '.. py:module:: circular_import', + '', + ] + assert sys.modules["circular_import"].a is sys.modules["circular_import.a"] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_singledispatch(app): + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.singledispatch', options) + assert list(actual) == [ + '', + '.. py:module:: target.singledispatch', + '', + '', + '.. py:function:: func(arg, kwarg=None)', + ' func(arg: float, kwarg=None)', + ' func(arg: int, kwarg=None)', + ' func(arg: str, kwarg=None)', + ' func(arg: dict, kwarg=None)', + ' :module: target.singledispatch', + '', + ' A function for general use.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_singledispatchmethod(app): + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.singledispatchmethod', options) + assert list(actual) == [ + '', + '.. py:module:: target.singledispatchmethod', + '', + '', + '.. py:class:: Foo()', + ' :module: target.singledispatchmethod', + '', + ' docstring', + '', + '', + ' .. py:method:: Foo.meth(arg, kwarg=None)', + ' Foo.meth(arg: float, kwarg=None)', + ' Foo.meth(arg: int, kwarg=None)', + ' Foo.meth(arg: str, kwarg=None)', + ' Foo.meth(arg: dict, kwarg=None)', + ' :module: target.singledispatchmethod', + '', + ' A method for general use.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_singledispatchmethod_automethod(app): + options = {} + actual = do_autodoc(app, 'method', 'target.singledispatchmethod.Foo.meth', options) + assert list(actual) == [ + '', + '.. py:method:: Foo.meth(arg, kwarg=None)', + ' Foo.meth(arg: float, kwarg=None)', + ' Foo.meth(arg: int, kwarg=None)', + ' Foo.meth(arg: str, kwarg=None)', + ' Foo.meth(arg: dict, kwarg=None)', + ' :module: target.singledispatchmethod', + '', + ' A method for general use.', + '', + ] + + +@pytest.mark.skipif(sys.version_info[:2] >= (3, 13), + reason='Cython does not support Python 3.13 yet.') +@pytest.mark.skipif(pyximport is None, reason='cython is not installed') +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_cython(app): + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.cython', options) + assert list(actual) == [ + '', + '.. py:module:: target.cython', + '', + '', + '.. py:class:: Class()', + ' :module: target.cython', + '', + ' Docstring.', + '', + '', + ' .. py:method:: Class.meth(name: str, age: int = 0) -> None', + ' :module: target.cython', + '', + ' Docstring.', + '', + '', + '.. py:function:: foo(x: int, *args, y: str, **kwargs)', + ' :module: target.cython', + '', + ' Docstring.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_final(app): + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.final', options) + assert list(actual) == [ + '', + '.. py:module:: target.final', + '', + '', + '.. py:class:: Class()', + ' :module: target.final', + ' :final:', + '', + ' docstring', + '', + '', + ' .. 
py:method:: Class.meth1()', + ' :module: target.final', + ' :final:', + '', + ' docstring', + '', + '', + ' .. py:method:: Class.meth2()', + ' :module: target.final', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_overload(app): + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.overload', options) + assert list(actual) == [ + '', + '.. py:module:: target.overload', + '', + '', + '.. py:class:: Bar(x: int, y: int)', + ' Bar(x: str, y: str)', + ' :module: target.overload', + '', + ' docstring', + '', + '', + '.. py:class:: Baz(x: int, y: int)', + ' Baz(x: str, y: str)', + ' :module: target.overload', + '', + ' docstring', + '', + '', + '.. py:class:: Foo(x: int, y: int)', + ' Foo(x: str, y: str)', + ' :module: target.overload', + '', + ' docstring', + '', + '', + '.. py:class:: Math()', + ' :module: target.overload', + '', + ' docstring', + '', + '', + ' .. py:method:: Math.sum(x: int, y: int = 0) -> int', + ' Math.sum(x: float, y: float = 0.0) -> float', + ' Math.sum(x: str, y: str = None) -> str', + ' :module: target.overload', + '', + ' docstring', + '', + '', + '.. py:function:: sum(x: int, y: int = 0) -> int', + ' sum(x: float, y: float = 0.0) -> float', + ' sum(x: str, y: str = None) -> str', + ' :module: target.overload', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_overload2(app): + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.overload2', options) + assert list(actual) == [ + '', + '.. py:module:: target.overload2', + '', + '', + '.. py:class:: Baz(x: int, y: int)', + ' Baz(x: str, y: str)', + ' :module: target.overload2', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_pymodule_for_ModuleLevelDocumenter(app): + app.env.ref_context['py:module'] = 'target.classes' + actual = do_autodoc(app, 'class', 'Foo') + assert list(actual) == [ + '', + '.. py:class:: Foo()', + ' :module: target.classes', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_pymodule_for_ClassLevelDocumenter(app): + app.env.ref_context['py:module'] = 'target.methods' + actual = do_autodoc(app, 'method', 'Base.meth') + assert list(actual) == [ + '', + '.. py:method:: Base.meth()', + ' :module: target.methods', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_pyclass_for_ClassLevelDocumenter(app): + app.env.ref_context['py:module'] = 'target.methods' + app.env.ref_context['py:class'] = 'Base' + actual = do_autodoc(app, 'method', 'meth') + assert list(actual) == [ + '', + '.. py:method:: Base.meth()', + ' :module: target.methods', + '', + ] + + +@pytest.mark.sphinx('dummy', testroot='ext-autodoc') +def test_autodoc(app, status, warning): + app.builder.build_all() + + content = app.env.get_doctree('index') + assert isinstance(content[3], addnodes.desc) + assert content[3][0].astext() == 'autodoc_dummy_module.test()' + assert content[3][1].astext() == 'Dummy function using dummy.*' + + # issue sphinx-doc/sphinx#2437 + assert content[11][-1].astext() == """Dummy class Bar with alias. + + + +my_name + +alias of Foo""" + assert warning.getvalue() == '' + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_name_conflict(app): + actual = do_autodoc(app, 'class', 'target.name_conflict.foo') + assert list(actual) == [ + '', + '.. 
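The overload expectations above rely on typing.overload stubs; a minimal sketch (assumed shape, not the upstream target/overload.py) looks like the following. Autodoc lists each overload as an extra signature line on the same py:function directive.

# hypothetical sketch of the module-level sum() in target/overload.py
from typing import overload


@overload
def sum(x: int, y: int = 0) -> int: ...
@overload
def sum(x: float, y: float = 0.0) -> float: ...
@overload
def sum(x: str, y: str = None) -> str: ...


def sum(x, y=None):          # shadows the builtin, mirroring the fixture's name
    """docstring"""
    return x if y is None else x + y
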
py:class:: foo()', + ' :module: target.name_conflict', + '', + ' docstring of target.name_conflict::foo.', + '', + ] + + actual = do_autodoc(app, 'class', 'target.name_conflict.foo.bar') + assert list(actual) == [ + '', + '.. py:class:: bar()', + ' :module: target.name_conflict.foo', + '', + ' docstring of target.name_conflict.foo::bar.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_name_mangling(app): + options = {"members": None, + "undoc-members": None, + "private-members": None} + actual = do_autodoc(app, 'module', 'target.name_mangling', options) + assert list(actual) == [ + '', + '.. py:module:: target.name_mangling', + '', + '', + '.. py:class:: Bar()', + ' :module: target.name_mangling', + '', + '', + ' .. py:attribute:: Bar._Baz__email', + ' :module: target.name_mangling', + ' :value: None', + '', + ' a member having mangled-like name', + '', + '', + ' .. py:attribute:: Bar.__address', + ' :module: target.name_mangling', + ' :value: None', + '', + '', + '.. py:class:: Foo()', + ' :module: target.name_mangling', + '', + '', + ' .. py:attribute:: Foo.__age', + ' :module: target.name_mangling', + ' :value: None', + '', + '', + ' .. py:attribute:: Foo.__name', + ' :module: target.name_mangling', + ' :value: None', + '', + ' name of Foo', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_type_union_operator(app): + options = {'members': None} + actual = do_autodoc(app, 'module', 'target.pep604', options) + assert list(actual) == [ + '', + '.. py:module:: target.pep604', + '', + '', + '.. py:class:: Foo()', + ' :module: target.pep604', + '', + ' docstring', + '', + '', + ' .. py:attribute:: Foo.attr', + ' :module: target.pep604', + ' :type: int | str', + '', + ' docstring', + '', + '', + ' .. py:method:: Foo.meth(x: int | str, y: int | str) -> int | str', + ' :module: target.pep604', + '', + ' docstring', + '', + '', + '.. py:data:: attr', + ' :module: target.pep604', + ' :type: int | str', + '', + ' docstring', + '', + '', + '.. py:function:: sum(x: int | str, y: int | str) -> int | str', + ' :module: target.pep604', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_hide_value(app): + options = {'members': None} + actual = do_autodoc(app, 'module', 'target.hide_value', options) + assert list(actual) == [ + '', + '.. py:module:: target.hide_value', + '', + '', + '.. py:class:: Foo()', + ' :module: target.hide_value', + '', + ' docstring', + '', + '', + ' .. py:attribute:: Foo.SENTINEL1', + ' :module: target.hide_value', + '', + ' docstring', + '', + ' :meta hide-value:', + '', + '', + ' .. py:attribute:: Foo.SENTINEL2', + ' :module: target.hide_value', + '', + ' :meta hide-value:', + '', + '', + '.. py:data:: SENTINEL1', + ' :module: target.hide_value', + '', + ' docstring', + '', + ' :meta hide-value:', + '', + '', + '.. py:data:: SENTINEL2', + ' :module: target.hide_value', + '', + ' :meta hide-value:', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_canonical(app): + options = {'members': None, + 'imported-members': None} + actual = do_autodoc(app, 'module', 'target.canonical', options) + assert list(actual) == [ + '', + '.. py:module:: target.canonical', + '', + '', + '.. py:class:: Bar()', + ' :module: target.canonical', + '', + ' docstring', + '', + '', + '.. py:class:: Foo()', + ' :module: target.canonical', + ' :canonical: target.canonical.original.Foo', + '', + ' docstring', + '', + '', + ' .. 
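The hide_value expectations above use the ":meta hide-value:" docstring field. A minimal sketch of how such a member is written (assumed, not the verbatim fixture): the field tells autodoc to omit the ":value: ..." line while keeping the rest of the docstring.

# hypothetical sketch of a hide-value member as in target/hide_value.py
#: docstring
#:
#: :meta hide-value:
SENTINEL1 = object()
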
py:method:: Foo.meth()', + ' :module: target.canonical', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_literal_render(app): + def bounded_typevar_rst(name, bound): + return [ + '', + f'.. py:class:: {name}', + ' :module: target.literal', + '', + ' docstring', + '', + f' alias of TypeVar({name!r}, bound={bound})', + '', + ] + + def function_rst(name, sig): + return [ + '', + f'.. py:function:: {name}({sig})', + ' :module: target.literal', + '', + ' docstring', + '', + ] + + # autodoc_typehints_format can take 'short' or 'fully-qualified' values + # and this will be interpreted as 'smart' or 'fully-qualified-except-typing' by restify() + # and 'smart' or 'fully-qualified' by stringify_annotation(). + + options = {'members': None, 'exclude-members': 'MyEnum'} + app.config.autodoc_typehints_format = 'short' + actual = do_autodoc(app, 'module', 'target.literal', options) + assert list(actual) == [ + '', + '.. py:module:: target.literal', + '', + *bounded_typevar_rst('T', r'\ :py:obj:`~typing.Literal`\ [1234]'), + *bounded_typevar_rst('U', r'\ :py:obj:`~typing.Literal`\ [:py:attr:`~target.literal.MyEnum.a`]'), + *function_rst('bar', 'x: ~typing.Literal[1234]'), + *function_rst('foo', 'x: ~typing.Literal[MyEnum.a]'), + ] + + # restify() assumes that 'fully-qualified' is 'fully-qualified-except-typing' + # because it is more likely that a user wants to suppress 'typing.*' + app.config.autodoc_typehints_format = 'fully-qualified' + actual = do_autodoc(app, 'module', 'target.literal', options) + assert list(actual) == [ + '', + '.. py:module:: target.literal', + '', + *bounded_typevar_rst('T', r'\ :py:obj:`~typing.Literal`\ [1234]'), + *bounded_typevar_rst('U', r'\ :py:obj:`~typing.Literal`\ [:py:attr:`target.literal.MyEnum.a`]'), + *function_rst('bar', 'x: typing.Literal[1234]'), + *function_rst('foo', 'x: typing.Literal[target.literal.MyEnum.a]'), + ] diff --git a/tests/test_ext_autodoc_autoattribute.py b/tests/test_ext_autodoc_autoattribute.py new file mode 100644 index 0000000..0424af0 --- /dev/null +++ b/tests/test_ext_autodoc_autoattribute.py @@ -0,0 +1,176 @@ +"""Test the autodoc extension. + +This tests mainly the Documenters; the auto directives are tested in a test +source file translated by test_build. +""" + +import pytest + +from .test_ext_autodoc import do_autodoc + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute(app): + actual = do_autodoc(app, 'attribute', 'target.Class.attr') + assert list(actual) == [ + '', + '.. py:attribute:: Class.attr', + ' :module: target', + " :value: 'bar'", + '', + ' should be documented -- süß', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_novalue(app): + options = {'no-value': None} + actual = do_autodoc(app, 'attribute', 'target.Class.attr', options) + assert list(actual) == [ + '', + '.. py:attribute:: Class.attr', + ' :module: target', + '', + ' should be documented -- süß', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_typed_variable(app): + actual = do_autodoc(app, 'attribute', 'target.typed_vars.Class.attr2') + assert list(actual) == [ + '', + '.. py:attribute:: Class.attr2', + ' :module: target.typed_vars', + ' :type: int', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_typed_variable_in_alias(app): + actual = do_autodoc(app, 'attribute', 'target.typed_vars.Alias.attr2') + assert list(actual) == [ + '', + '.. 
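test_literal_render above toggles autodoc_typehints_format at runtime; in a real project this is a conf.py setting. A small sketch of the two accepted values (project layout assumed):

# conf.py sketch
extensions = ['sphinx.ext.autodoc']

# 'short' renders e.g. ~typing.Literal[1234]; 'fully-qualified' renders
# typing.Literal[1234] in signatures and cross-references.
autodoc_typehints_format = 'short'   # or 'fully-qualified'
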
py:attribute:: Alias.attr2', + ' :module: target.typed_vars', + ' :type: int', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_instance_variable(app): + actual = do_autodoc(app, 'attribute', 'target.typed_vars.Class.attr4') + assert list(actual) == [ + '', + '.. py:attribute:: Class.attr4', + ' :module: target.typed_vars', + ' :type: int', + '', + ' attr4', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_instance_variable_in_alias(app): + actual = do_autodoc(app, 'attribute', 'target.typed_vars.Alias.attr4') + assert list(actual) == [ + '', + '.. py:attribute:: Alias.attr4', + ' :module: target.typed_vars', + ' :type: int', + '', + ' attr4', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_instance_variable_without_comment(app): + actual = do_autodoc(app, 'attribute', 'target.instance_variable.Bar.attr4') + assert list(actual) == [ + '', + '.. py:attribute:: Bar.attr4', + ' :module: target.instance_variable', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_slots_variable_list(app): + actual = do_autodoc(app, 'attribute', 'target.slots.Foo.attr') + assert list(actual) == [ + '', + '.. py:attribute:: Foo.attr', + ' :module: target.slots', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_slots_variable_dict(app): + actual = do_autodoc(app, 'attribute', 'target.slots.Bar.attr1') + assert list(actual) == [ + '', + '.. py:attribute:: Bar.attr1', + ' :module: target.slots', + ' :type: int', + '', + ' docstring of attr1', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_slots_variable_str(app): + actual = do_autodoc(app, 'attribute', 'target.slots.Baz.attr') + assert list(actual) == [ + '', + '.. py:attribute:: Baz.attr', + ' :module: target.slots', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_GenericAlias(app): + actual = do_autodoc(app, 'attribute', 'target.genericalias.Class.T') + assert list(actual) == [ + '', + '.. py:attribute:: Class.T', + ' :module: target.genericalias', + '', + ' A list of int', + '', + ' alias of :py:class:`~typing.List`\\ [:py:class:`int`]', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_hide_value(app): + actual = do_autodoc(app, 'attribute', 'target.hide_value.Foo.SENTINEL1') + assert list(actual) == [ + '', + '.. py:attribute:: Foo.SENTINEL1', + ' :module: target.hide_value', + '', + ' docstring', + '', + ' :meta hide-value:', + '', + ] + + actual = do_autodoc(app, 'attribute', 'target.hide_value.Foo.SENTINEL2') + assert list(actual) == [ + '', + '.. py:attribute:: Foo.SENTINEL2', + ' :module: target.hide_value', + '', + ' :meta hide-value:', + '', + ] diff --git a/tests/test_ext_autodoc_autoclass.py b/tests/test_ext_autodoc_autoclass.py new file mode 100644 index 0000000..92c259a --- /dev/null +++ b/tests/test_ext_autodoc_autoclass.py @@ -0,0 +1,517 @@ +"""Test the autodoc extension. + +This tests mainly the Documenters; the auto directives are tested in a test +source file translated by test_build. +""" + +from __future__ import annotations + +import typing +from typing import Union + +import pytest + +from .test_ext_autodoc import do_autodoc + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_classes(app): + actual = do_autodoc(app, 'function', 'target.classes.Foo') + assert list(actual) == [ + '', + '.. 
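The slots tests above exercise dict-valued __slots__: autodoc uses the dict values as docstrings for the slot attributes, and class-level annotations supply the ":type:" option. A hedged illustration (not the upstream target/slots.py):

# hypothetical sketch of a slots class with docstrings in __slots__
class Bar:
    __slots__ = {'attr1': 'docstring of attr1'}

    attr1: int   # annotation only; provides the ":type: int" option
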
py:function:: Foo()', + ' :module: target.classes', + '', + ] + + actual = do_autodoc(app, 'function', 'target.classes.Bar') + assert list(actual) == [ + '', + '.. py:function:: Bar(x, y)', + ' :module: target.classes', + '', + ] + + actual = do_autodoc(app, 'function', 'target.classes.Baz') + assert list(actual) == [ + '', + '.. py:function:: Baz(x, y)', + ' :module: target.classes', + '', + ] + + actual = do_autodoc(app, 'function', 'target.classes.Qux') + assert list(actual) == [ + '', + '.. py:function:: Qux(foo, bar)', + ' :module: target.classes', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_instance_variable(app): + options = {'members': None} + actual = do_autodoc(app, 'class', 'target.instance_variable.Bar', options) + assert list(actual) == [ + '', + '.. py:class:: Bar()', + ' :module: target.instance_variable', + '', + '', + ' .. py:attribute:: Bar.attr2', + ' :module: target.instance_variable', + '', + ' docstring bar', + '', + '', + ' .. py:attribute:: Bar.attr3', + ' :module: target.instance_variable', + '', + ' docstring bar', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_inherited_instance_variable(app): + options = {'members': None, + 'inherited-members': None} + actual = do_autodoc(app, 'class', 'target.instance_variable.Bar', options) + assert list(actual) == [ + '', + '.. py:class:: Bar()', + ' :module: target.instance_variable', + '', + '', + ' .. py:attribute:: Bar.attr1', + ' :module: target.instance_variable', + '', + ' docstring foo', + '', + '', + ' .. py:attribute:: Bar.attr2', + ' :module: target.instance_variable', + '', + ' docstring bar', + '', + '', + ' .. py:attribute:: Bar.attr3', + ' :module: target.instance_variable', + '', + ' docstring bar', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_uninitialized_attributes(app): + options = {"members": None, + "inherited-members": None} + actual = do_autodoc(app, 'class', 'target.uninitialized_attributes.Derived', options) + assert list(actual) == [ + '', + '.. py:class:: Derived()', + ' :module: target.uninitialized_attributes', + '', + '', + ' .. py:attribute:: Derived.attr1', + ' :module: target.uninitialized_attributes', + ' :type: int', + '', + ' docstring', + '', + '', + ' .. py:attribute:: Derived.attr3', + ' :module: target.uninitialized_attributes', + ' :type: int', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_undocumented_uninitialized_attributes(app): + options = {"members": None, + "inherited-members": None, + "undoc-members": None} + actual = do_autodoc(app, 'class', 'target.uninitialized_attributes.Derived', options) + assert list(actual) == [ + '', + '.. py:class:: Derived()', + ' :module: target.uninitialized_attributes', + '', + '', + ' .. py:attribute:: Derived.attr1', + ' :module: target.uninitialized_attributes', + ' :type: int', + '', + ' docstring', + '', + '', + ' .. py:attribute:: Derived.attr2', + ' :module: target.uninitialized_attributes', + ' :type: str', + '', + '', + ' .. py:attribute:: Derived.attr3', + ' :module: target.uninitialized_attributes', + ' :type: int', + '', + ' docstring', + '', + '', + ' .. py:attribute:: Derived.attr4', + ' :module: target.uninitialized_attributes', + ' :type: str', + '', + ] + + +def test_decorators(app): + actual = do_autodoc(app, 'class', 'target.decorator.Baz') + assert list(actual) == [ + '', + '.. 
py:class:: Baz(name=None, age=None)', + ' :module: target.decorator', + '', + ] + + actual = do_autodoc(app, 'class', 'target.decorator.Qux') + assert list(actual) == [ + '', + '.. py:class:: Qux(name=None, age=None)', + ' :module: target.decorator', + '', + ] + + actual = do_autodoc(app, 'class', 'target.decorator.Quux') + assert list(actual) == [ + '', + '.. py:class:: Quux(name=None, age=None)', + ' :module: target.decorator', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_properties(app): + options = {"members": None} + actual = do_autodoc(app, 'class', 'target.properties.Foo', options) + assert list(actual) == [ + '', + '.. py:class:: Foo()', + ' :module: target.properties', + '', + ' docstring', + '', + '', + ' .. py:property:: Foo.prop1', + ' :module: target.properties', + ' :type: int', + '', + ' docstring', + '', + '', + ' .. py:property:: Foo.prop1_with_type_comment', + ' :module: target.properties', + ' :type: int', + '', + ' docstring', + '', + '', + ' .. py:property:: Foo.prop2', + ' :module: target.properties', + ' :classmethod:', + ' :type: int', + '', + ' docstring', + '', + '', + ' .. py:property:: Foo.prop2_with_type_comment', + ' :module: target.properties', + ' :classmethod:', + ' :type: int', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_slots_attribute(app): + options = {"members": None} + actual = do_autodoc(app, 'class', 'target.slots.Bar', options) + assert list(actual) == [ + '', + '.. py:class:: Bar()', + ' :module: target.slots', + '', + ' docstring', + '', + '', + ' .. py:attribute:: Bar.attr1', + ' :module: target.slots', + ' :type: int', + '', + ' docstring of attr1', + '', + '', + ' .. py:attribute:: Bar.attr2', + ' :module: target.slots', + '', + ' docstring of instance attr2', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_show_inheritance_for_subclass_of_generic_type(app): + options = {'show-inheritance': None} + actual = do_autodoc(app, 'class', 'target.classes.Quux', options) + assert list(actual) == [ + '', + '.. py:class:: Quux(iterable=(), /)', + ' :module: target.classes', + '', + ' Bases: :py:class:`~typing.List`\\ ' + '[:py:obj:`~typing.Union`\\ [:py:class:`int`, :py:class:`float`]]', + '', + ' A subclass of List[Union[int, float]]', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_show_inheritance_for_decendants_of_generic_type(app): + options = {'show-inheritance': None} + actual = do_autodoc(app, 'class', 'target.classes.Corge', options) + assert list(actual) == [ + '', + '.. py:class:: Corge(iterable=(), /)', + ' :module: target.classes', + '', + ' Bases: :py:class:`~target.classes.Quux`', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_process_bases(app): + def autodoc_process_bases(app, name, obj, options, bases): + assert name == 'target.classes.Quux' + assert obj.__module__ == 'target.classes' + assert obj.__name__ == 'Quux' + assert options == {'show-inheritance': True, + 'members': []} + assert bases == [typing.List[Union[int, float]]] # NoQA: UP006 + + bases.pop() + bases.extend([int, str]) + + app.connect('autodoc-process-bases', autodoc_process_bases) + + options = {'show-inheritance': None} + actual = do_autodoc(app, 'class', 'target.classes.Quux', options) + assert list(actual) == [ + '', + '.. 
py:class:: Quux(iterable=(), /)', + ' :module: target.classes', + '', + ' Bases: :py:class:`int`, :py:class:`str`', + '', + ' A subclass of List[Union[int, float]]', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_class_doc_from_class(app): + options = {"members": None, + "class-doc-from": "class"} + actual = do_autodoc(app, 'class', 'target.autoclass_content.C', options) + assert list(actual) == [ + '', + '.. py:class:: C()', + ' :module: target.autoclass_content', + '', + ' A class having __init__, no __new__', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_class_doc_from_init(app): + options = {"members": None, + "class-doc-from": "init"} + actual = do_autodoc(app, 'class', 'target.autoclass_content.C', options) + assert list(actual) == [ + '', + '.. py:class:: C()', + ' :module: target.autoclass_content', + '', + ' __init__ docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_class_doc_from_both(app): + options = {"members": None, + "class-doc-from": "both"} + actual = do_autodoc(app, 'class', 'target.autoclass_content.C', options) + assert list(actual) == [ + '', + '.. py:class:: C()', + ' :module: target.autoclass_content', + '', + ' A class having __init__, no __new__', + '', + ' __init__ docstring', + '', + ] + + +def test_class_alias(app): + def autodoc_process_docstring(*args): + """A handler always raises an error. + This confirms this handler is never called for class aliases. + """ + raise + + app.connect('autodoc-process-docstring', autodoc_process_docstring) + actual = do_autodoc(app, 'class', 'target.classes.Alias') + assert list(actual) == [ + '', + '.. py:attribute:: Alias', + ' :module: target.classes', + '', + ' alias of :py:class:`~target.classes.Foo`', + ] + + +def test_class_alias_having_doccomment(app): + actual = do_autodoc(app, 'class', 'target.classes.OtherAlias') + assert list(actual) == [ + '', + '.. py:attribute:: OtherAlias', + ' :module: target.classes', + '', + ' docstring', + '', + ] + + +def test_class_alias_for_imported_object_having_doccomment(app): + actual = do_autodoc(app, 'class', 'target.classes.IntAlias') + assert list(actual) == [ + '', + '.. py:attribute:: IntAlias', + ' :module: target.classes', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_coroutine(app): + options = {"members": None} + actual = do_autodoc(app, 'class', 'target.coroutine.AsyncClass', options) + assert list(actual) == [ + '', + '.. py:class:: AsyncClass()', + ' :module: target.coroutine', + '', + '', + ' .. py:method:: AsyncClass.do_asyncgen()', + ' :module: target.coroutine', + ' :async:', + '', + ' A documented async generator', + '', + '', + ' .. py:method:: AsyncClass.do_coroutine()', + ' :module: target.coroutine', + ' :async:', + '', + ' A documented coroutine function', + '', + '', + ' .. py:method:: AsyncClass.do_coroutine2()', + ' :module: target.coroutine', + ' :async:', + ' :classmethod:', + '', + ' A documented coroutine classmethod', + '', + '', + ' .. py:method:: AsyncClass.do_coroutine3()', + ' :module: target.coroutine', + ' :async:', + ' :staticmethod:', + '', + ' A documented coroutine staticmethod', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodata_NewType_module_level(app): + actual = do_autodoc(app, 'class', 'target.typevar.T6') + assert list(actual) == [ + '', + '.. 
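test_autodoc_process_bases above connects the autodoc-process-bases event; a conf.py handler has the same signature and may mutate the bases list in place to change what ":show-inheritance:" renders. A sketch (the _PrivateMixin name is hypothetical):

# conf.py sketch
def autodoc_process_bases(app, name, obj, options, bases):
    # e.g. hide a private mixin from the rendered "Bases:" list
    bases[:] = [b for b in bases if getattr(b, '__name__', '') != '_PrivateMixin']


def setup(app):
    app.connect('autodoc-process-bases', autodoc_process_bases)
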
py:class:: T6', + ' :module: target.typevar', + '', + ' T6', + '', + ' alias of :py:class:`~datetime.date`', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_NewType_class_level(app): + actual = do_autodoc(app, 'class', 'target.typevar.Class.T6') + assert list(actual) == [ + '', + '.. py:class:: Class.T6', + ' :module: target.typevar', + '', + ' T6', + '', + ' alias of :py:class:`~datetime.date`', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodata_TypeVar_class_level(app): + actual = do_autodoc(app, 'class', 'target.typevar.T1') + assert list(actual) == [ + '', + '.. py:class:: T1', + ' :module: target.typevar', + '', + ' T1', + '', + " alias of TypeVar('T1')", + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoattribute_TypeVar_module_level(app): + actual = do_autodoc(app, 'class', 'target.typevar.Class.T1') + assert list(actual) == [ + '', + '.. py:class:: Class.T1', + ' :module: target.typevar', + '', + ' T1', + '', + " alias of TypeVar('T1')", + '', + ] diff --git a/tests/test_ext_autodoc_autodata.py b/tests/test_ext_autodoc_autodata.py new file mode 100644 index 0000000..83647d9 --- /dev/null +++ b/tests/test_ext_autodoc_autodata.py @@ -0,0 +1,106 @@ +"""Test the autodoc extension. + +This tests mainly the Documenters; the auto directives are tested in a test +source file translated by test_build. +""" + +import pytest + +from .test_ext_autodoc import do_autodoc + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodata(app): + actual = do_autodoc(app, 'data', 'target.integer') + assert list(actual) == [ + '', + '.. py:data:: integer', + ' :module: target', + ' :value: 1', + '', + ' documentation for the integer', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodata_novalue(app): + options = {'no-value': None} + actual = do_autodoc(app, 'data', 'target.integer', options) + assert list(actual) == [ + '', + '.. py:data:: integer', + ' :module: target', + '', + ' documentation for the integer', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodata_typed_variable(app): + actual = do_autodoc(app, 'data', 'target.typed_vars.attr2') + assert list(actual) == [ + '', + '.. py:data:: attr2', + ' :module: target.typed_vars', + ' :type: str', + '', + ' attr2', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodata_type_comment(app): + actual = do_autodoc(app, 'data', 'target.typed_vars.attr3') + assert list(actual) == [ + '', + '.. py:data:: attr3', + ' :module: target.typed_vars', + ' :type: str', + " :value: ''", + '', + ' attr3', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodata_GenericAlias(app): + actual = do_autodoc(app, 'data', 'target.genericalias.T') + assert list(actual) == [ + '', + '.. py:data:: T', + ' :module: target.genericalias', + '', + ' A list of int', + '', + ' alias of :py:class:`~typing.List`\\ [:py:class:`int`]', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodata_hide_value(app): + actual = do_autodoc(app, 'data', 'target.hide_value.SENTINEL1') + assert list(actual) == [ + '', + '.. py:data:: SENTINEL1', + ' :module: target.hide_value', + '', + ' docstring', + '', + ' :meta hide-value:', + '', + ] + + actual = do_autodoc(app, 'data', 'target.hide_value.SENTINEL2') + assert list(actual) == [ + '', + '.. 
py:data:: SENTINEL2', + ' :module: target.hide_value', + '', + ' :meta hide-value:', + '', + ] diff --git a/tests/test_ext_autodoc_autofunction.py b/tests/test_ext_autodoc_autofunction.py new file mode 100644 index 0000000..b0cd7d9 --- /dev/null +++ b/tests/test_ext_autodoc_autofunction.py @@ -0,0 +1,201 @@ +"""Test the autodoc extension. + +This tests mainly the Documenters; the auto directives are tested in a test +source file translated by test_build. +""" + +import pytest + +from .test_ext_autodoc import do_autodoc + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_classes(app): + actual = do_autodoc(app, 'function', 'target.classes.Foo') + assert list(actual) == [ + '', + '.. py:function:: Foo()', + ' :module: target.classes', + '', + ] + + actual = do_autodoc(app, 'function', 'target.classes.Bar') + assert list(actual) == [ + '', + '.. py:function:: Bar(x, y)', + ' :module: target.classes', + '', + ] + + actual = do_autodoc(app, 'function', 'target.classes.Baz') + assert list(actual) == [ + '', + '.. py:function:: Baz(x, y)', + ' :module: target.classes', + '', + ] + + actual = do_autodoc(app, 'function', 'target.classes.Qux') + assert list(actual) == [ + '', + '.. py:function:: Qux(foo, bar)', + ' :module: target.classes', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_callable(app): + actual = do_autodoc(app, 'function', 'target.callable.function') + assert list(actual) == [ + '', + '.. py:function:: function(arg1, arg2, **kwargs)', + ' :module: target.callable', + '', + ' A callable object that behaves like a function.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_method(app): + actual = do_autodoc(app, 'function', 'target.callable.method') + assert list(actual) == [ + '', + '.. py:function:: method(arg1, arg2)', + ' :module: target.callable', + '', + ' docstring of Callable.method().', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_builtin_function(app): + actual = do_autodoc(app, 'function', 'os.umask') + assert list(actual) == [ + '', + '.. py:function:: umask(mask, /)', + ' :module: os', + '', + ' Set the current numeric umask and return the previous umask.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_methoddescriptor(app): + actual = do_autodoc(app, 'function', 'builtins.int.__add__') + assert list(actual) == [ + '', + '.. py:function:: __add__(self, value, /)', + ' :module: builtins.int', + '', + ' Return self+value.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_decorated(app): + actual = do_autodoc(app, 'function', 'target.decorator.foo') + assert list(actual) == [ + '', + '.. py:function:: foo(name=None, age=None)', + ' :module: target.decorator', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_singledispatch(app): + options = {} + actual = do_autodoc(app, 'function', 'target.singledispatch.func', options) + assert list(actual) == [ + '', + '.. py:function:: func(arg, kwarg=None)', + ' func(arg: float, kwarg=None)', + ' func(arg: int, kwarg=None)', + ' func(arg: str, kwarg=None)', + ' func(arg: dict, kwarg=None)', + ' :module: target.singledispatch', + '', + ' A function for general use.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_cfunction(app): + actual = do_autodoc(app, 'function', 'time.asctime') + assert list(actual) == [ + '', + '.. 
py:function:: asctime([tuple]) -> string', + ' :module: time', + '', + " Convert a time tuple to a string, e.g. 'Sat Jun 06 16:26:11 1998'.", + ' When the time tuple is not present, current time as returned by localtime()', + ' is used.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_wrapped_function(app): + actual = do_autodoc(app, 'function', 'target.wrappedfunction.slow_function') + assert list(actual) == [ + '', + '.. py:function:: slow_function(message, timeout)', + ' :module: target.wrappedfunction', + '', + ' This function is slow.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_wrapped_function_contextmanager(app): + actual = do_autodoc(app, 'function', 'target.wrappedfunction.feeling_good') + assert list(actual) == [ + '', + '.. py:function:: feeling_good(x: int, y: int) -> ~typing.Generator', + ' :module: target.wrappedfunction', + '', + " You'll feel better in this context!", + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_coroutine(app): + actual = do_autodoc(app, 'function', 'target.functions.coroutinefunc') + assert list(actual) == [ + '', + '.. py:function:: coroutinefunc()', + ' :module: target.functions', + ' :async:', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_synchronized_coroutine(app): + actual = do_autodoc(app, 'function', 'target.coroutine.sync_func') + assert list(actual) == [ + '', + '.. py:function:: sync_func()', + ' :module: target.coroutine', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_async_generator(app): + actual = do_autodoc(app, 'function', 'target.functions.asyncgenerator') + assert list(actual) == [ + '', + '.. py:function:: asyncgenerator()', + ' :module: target.functions', + ' :async:', + '', + ] diff --git a/tests/test_ext_autodoc_automodule.py b/tests/test_ext_autodoc_automodule.py new file mode 100644 index 0000000..2855020 --- /dev/null +++ b/tests/test_ext_autodoc_automodule.py @@ -0,0 +1,192 @@ +"""Test the autodoc extension. + +This tests mainly the Documenters; the auto directives are tested in a test +source file translated by test_build. +""" + +import sys + +import pytest + +from .test_ext_autodoc import do_autodoc + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_empty_all(app): + options = {'members': None} + actual = do_autodoc(app, 'module', 'target.empty_all', options) + assert list(actual) == [ + '', + '.. py:module:: target.empty_all', + '', + ' docsting of empty_all module.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_automodule(app): + options = {'members': None} + actual = do_autodoc(app, 'module', 'target.module', options) + assert list(actual) == [ + '', + '.. py:module:: target.module', + '', + '', + '.. py:data:: annotated', + ' :module: target.module', + ' :type: int', + '', + ' docstring', + '', + '', + '.. py:data:: documented', + ' :module: target.module', + ' :value: 1', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_automodule_undoc_members(app): + options = {'members': None, + 'undoc-members': None} + actual = do_autodoc(app, 'module', 'target.module', options) + assert list(actual) == [ + '', + '.. py:module:: target.module', + '', + '', + '.. py:data:: annotated', + ' :module: target.module', + ' :type: int', + '', + ' docstring', + '', + '', + '.. py:data:: documented', + ' :module: target.module', + ' :value: 1', + '', + ' docstring', + '', + '', + '.. 
py:data:: undoc_annotated', + ' :module: target.module', + ' :type: int', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_automodule_special_members(app): + options = {'members': None, + 'special-members': None} + actual = do_autodoc(app, 'module', 'target.module', options) + assert list(actual) == [ + '', + '.. py:module:: target.module', + '', + '', + '.. py:data:: __documented_special__', + ' :module: target.module', + ' :value: 1', + '', + ' docstring', + '', + '', + '.. py:data:: annotated', + ' :module: target.module', + ' :type: int', + '', + ' docstring', + '', + '', + '.. py:data:: documented', + ' :module: target.module', + ' :value: 1', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_automodule_inherited_members(app): + options = {'members': None, + 'undoc-members': None, + 'inherited-members': 'Base, list'} + actual = do_autodoc(app, 'module', 'target.inheritance', options) + assert list(actual) == [ + '', + '.. py:module:: target.inheritance', + '', + '', + '.. py:class:: Base()', + ' :module: target.inheritance', + '', + '', + ' .. py:attribute:: Base.inheritedattr', + ' :module: target.inheritance', + ' :value: None', + '', + ' docstring', + '', + '', + ' .. py:method:: Base.inheritedclassmeth()', + ' :module: target.inheritance', + ' :classmethod:', + '', + ' Inherited class method.', + '', + '', + ' .. py:method:: Base.inheritedmeth()', + ' :module: target.inheritance', + '', + ' Inherited function.', + '', + '', + ' .. py:method:: Base.inheritedstaticmeth(cls)', + ' :module: target.inheritance', + ' :staticmethod:', + '', + ' Inherited static method.', + '', + '', + '.. py:class:: Derived()', + ' :module: target.inheritance', + '', + '', + ' .. py:method:: Derived.inheritedmeth()', + ' :module: target.inheritance', + '', + ' Inherited function.', + '', + '', + '.. py:class:: MyList(iterable=(), /)', + ' :module: target.inheritance', + '', + '', + ' .. py:method:: MyList.meth()', + ' :module: target.inheritance', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc', + confoverrides={'autodoc_mock_imports': ['missing_module', + 'missing_package1', + 'missing_package2', + 'missing_package3', + 'sphinx.missing_module4']}) +@pytest.mark.usefixtures("rollback_sysmodules") +def test_subclass_of_mocked_object(app): + sys.modules.pop('target', None) # unload target module to clear the module cache + + options = {'members': None} + actual = do_autodoc(app, 'module', 'target.need_mocks', options) + assert '.. py:class:: Inherited(*args: ~typing.Any, **kwargs: ~typing.Any)' in actual diff --git a/tests/test_ext_autodoc_autoproperty.py b/tests/test_ext_autodoc_autoproperty.py new file mode 100644 index 0000000..ca8b981 --- /dev/null +++ b/tests/test_ext_autodoc_autoproperty.py @@ -0,0 +1,91 @@ +"""Test the autodoc extension. + +This tests mainly the Documenters; the auto directives are tested in a test +source file translated by test_build. +""" + +import pytest + +from .test_ext_autodoc import do_autodoc + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_properties(app): + actual = do_autodoc(app, 'property', 'target.properties.Foo.prop1') + assert list(actual) == [ + '', + '.. 
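The mocked-import tests above drive autodoc_mock_imports via confoverrides; in a project it is a plain conf.py list. Modules named there are replaced by mock objects at import time, so documented code can subclass or annotate with them without the real dependency installed. A sketch:

# conf.py sketch (module names illustrative, taken from the test above)
extensions = ['sphinx.ext.autodoc']

autodoc_mock_imports = [
    'missing_module',
    'missing_package1',
]
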
py:property:: Foo.prop1', + ' :module: target.properties', + ' :type: int', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_class_properties(app): + actual = do_autodoc(app, 'property', 'target.properties.Foo.prop2') + assert list(actual) == [ + '', + '.. py:property:: Foo.prop2', + ' :module: target.properties', + ' :classmethod:', + ' :type: int', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_properties_with_type_comment(app): + actual = do_autodoc(app, 'property', 'target.properties.Foo.prop1_with_type_comment') + assert list(actual) == [ + '', + '.. py:property:: Foo.prop1_with_type_comment', + ' :module: target.properties', + ' :type: int', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_class_properties_with_type_comment(app): + actual = do_autodoc(app, 'property', 'target.properties.Foo.prop2_with_type_comment') + assert list(actual) == [ + '', + '.. py:property:: Foo.prop2_with_type_comment', + ' :module: target.properties', + ' :classmethod:', + ' :type: int', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_cached_properties(app): + actual = do_autodoc(app, 'property', 'target.cached_property.Foo.prop') + assert list(actual) == [ + '', + '.. py:property:: Foo.prop', + ' :module: target.cached_property', + ' :type: int', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_cached_properties_with_type_comment(app): + actual = do_autodoc(app, 'property', 'target.cached_property.Foo.prop_with_type_comment') + assert list(actual) == [ + '', + '.. py:property:: Foo.prop_with_type_comment', + ' :module: target.cached_property', + ' :type: int', + '', + ] diff --git a/tests/test_ext_autodoc_configs.py b/tests/test_ext_autodoc_configs.py new file mode 100644 index 0000000..45bc729 --- /dev/null +++ b/tests/test_ext_autodoc_configs.py @@ -0,0 +1,1727 @@ +"""Test the autodoc extension. This tests mainly for config variables""" + +import platform +import sys +from contextlib import contextmanager + +import pytest + +from sphinx.testing import restructuredtext + +from .test_ext_autodoc import do_autodoc + +IS_PYPY = platform.python_implementation() == 'PyPy' + + +@contextmanager +def overwrite_file(path, content): + current_content = path.read_bytes() if path.exists() else None + try: + path.write_text(content, encoding='utf-8') + yield + finally: + if current_content is not None: + path.write_bytes(current_content) + else: + path.unlink() + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoclass_content_class(app): + app.config.autoclass_content = 'class' + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.autoclass_content', options) + assert list(actual) == [ + '', + '.. py:module:: target.autoclass_content', + '', + '', + '.. py:class:: A()', + ' :module: target.autoclass_content', + '', + ' A class having no __init__, no __new__', + '', + '', + '.. py:class:: B()', + ' :module: target.autoclass_content', + '', + ' A class having __init__(no docstring), no __new__', + '', + '', + '.. py:class:: C()', + ' :module: target.autoclass_content', + '', + ' A class having __init__, no __new__', + '', + '', + '.. py:class:: D()', + ' :module: target.autoclass_content', + '', + ' A class having no __init__, __new__(no docstring)', + '', + '', + '.. 
py:class:: E()', + ' :module: target.autoclass_content', + '', + ' A class having no __init__, __new__', + '', + '', + '.. py:class:: F()', + ' :module: target.autoclass_content', + '', + ' A class having both __init__ and __new__', + '', + '', + '.. py:class:: G()', + ' :module: target.autoclass_content', + '', + ' A class inherits __init__ without docstring.', + '', + '', + '.. py:class:: H()', + ' :module: target.autoclass_content', + '', + ' A class inherits __new__ without docstring.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoclass_content_init(app): + app.config.autoclass_content = 'init' + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.autoclass_content', options) + assert list(actual) == [ + '', + '.. py:module:: target.autoclass_content', + '', + '', + '.. py:class:: A()', + ' :module: target.autoclass_content', + '', + ' A class having no __init__, no __new__', + '', + '', + '.. py:class:: B()', + ' :module: target.autoclass_content', + '', + ' A class having __init__(no docstring), no __new__', + '', + '', + '.. py:class:: C()', + ' :module: target.autoclass_content', + '', + ' __init__ docstring', + '', + '', + '.. py:class:: D()', + ' :module: target.autoclass_content', + '', + ' A class having no __init__, __new__(no docstring)', + '', + '', + '.. py:class:: E()', + ' :module: target.autoclass_content', + '', + ' __new__ docstring', + '', + '', + '.. py:class:: F()', + ' :module: target.autoclass_content', + '', + ' __init__ docstring', + '', + '', + '.. py:class:: G()', + ' :module: target.autoclass_content', + '', + ' __init__ docstring', + '', + '', + '.. py:class:: H()', + ' :module: target.autoclass_content', + '', + ' __new__ docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_class_signature_mixed(app): + app.config.autodoc_class_signature = 'mixed' + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'class', 'target.classes.Bar', options) + assert list(actual) == [ + '', + '.. py:class:: Bar(x, y)', + ' :module: target.classes', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_class_signature_separated_init(app): + app.config.autodoc_class_signature = 'separated' + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'class', 'target.classes.Bar', options) + assert list(actual) == [ + '', + '.. py:class:: Bar', + ' :module: target.classes', + '', + '', + ' .. py:method:: Bar.__init__(x, y)', + ' :module: target.classes', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_class_signature_separated_new(app): + app.config.autodoc_class_signature = 'separated' + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'class', 'target.classes.Baz', options) + assert list(actual) == [ + '', + '.. py:class:: Baz', + ' :module: target.classes', + '', + '', + ' .. py:method:: Baz.__new__(cls, x, y)', + ' :module: target.classes', + ' :staticmethod:', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoclass_content_both(app): + app.config.autoclass_content = 'both' + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.autoclass_content', options) + assert list(actual) == [ + '', + '.. py:module:: target.autoclass_content', + '', + '', + '.. py:class:: A()', + ' :module: target.autoclass_content', + '', + ' A class having no __init__, no __new__', + '', + '', + '.. 
py:class:: B()', + ' :module: target.autoclass_content', + '', + ' A class having __init__(no docstring), no __new__', + '', + '', + '.. py:class:: C()', + ' :module: target.autoclass_content', + '', + ' A class having __init__, no __new__', + '', + ' __init__ docstring', + '', + '', + '.. py:class:: D()', + ' :module: target.autoclass_content', + '', + ' A class having no __init__, __new__(no docstring)', + '', + '', + '.. py:class:: E()', + ' :module: target.autoclass_content', + '', + ' A class having no __init__, __new__', + '', + ' __new__ docstring', + '', + '', + '.. py:class:: F()', + ' :module: target.autoclass_content', + '', + ' A class having both __init__ and __new__', + '', + ' __init__ docstring', + '', + '', + '.. py:class:: G()', + ' :module: target.autoclass_content', + '', + ' A class inherits __init__ without docstring.', + '', + ' __init__ docstring', + '', + '', + '.. py:class:: H()', + ' :module: target.autoclass_content', + '', + ' A class inherits __new__ without docstring.', + '', + ' __new__ docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_inherit_docstrings(app): + assert app.config.autodoc_inherit_docstrings is True # default + actual = do_autodoc(app, 'method', 'target.inheritance.Derived.inheritedmeth') + assert list(actual) == [ + '', + '.. py:method:: Derived.inheritedmeth()', + ' :module: target.inheritance', + '', + ' Inherited function.', + '', + ] + + # disable autodoc_inherit_docstrings + app.config.autodoc_inherit_docstrings = False + actual = do_autodoc(app, 'method', 'target.inheritance.Derived.inheritedmeth') + assert list(actual) == [ + '', + '.. py:method:: Derived.inheritedmeth()', + ' :module: target.inheritance', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_inherit_docstrings_for_inherited_members(app): + options = {"members": None, + "inherited-members": None} + + assert app.config.autodoc_inherit_docstrings is True # default + actual = do_autodoc(app, 'class', 'target.inheritance.Derived', options) + assert list(actual) == [ + '', + '.. py:class:: Derived()', + ' :module: target.inheritance', + '', + '', + ' .. py:attribute:: Derived.inheritedattr', + ' :module: target.inheritance', + ' :value: None', + '', + ' docstring', + '', + '', + ' .. py:method:: Derived.inheritedclassmeth()', + ' :module: target.inheritance', + ' :classmethod:', + '', + ' Inherited class method.', + '', + '', + ' .. py:method:: Derived.inheritedmeth()', + ' :module: target.inheritance', + '', + ' Inherited function.', + '', + '', + ' .. py:method:: Derived.inheritedstaticmeth(cls)', + ' :module: target.inheritance', + ' :staticmethod:', + '', + ' Inherited static method.', + '', + ] + + # disable autodoc_inherit_docstrings + app.config.autodoc_inherit_docstrings = False + actual = do_autodoc(app, 'class', 'target.inheritance.Derived', options) + assert list(actual) == [ + '', + '.. py:class:: Derived()', + ' :module: target.inheritance', + '', + '', + ' .. py:method:: Derived.inheritedclassmeth()', + ' :module: target.inheritance', + ' :classmethod:', + '', + ' Inherited class method.', + '', + '', + ' .. 
py:method:: Derived.inheritedstaticmeth(cls)', + ' :module: target.inheritance', + ' :staticmethod:', + '', + ' Inherited static method.', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_docstring_signature(app): + options = {"members": None, "special-members": "__init__, __new__"} + actual = do_autodoc(app, 'class', 'target.DocstringSig', options) + assert list(actual) == [ + '', + # FIXME: Ideally this would instead be: `DocstringSig(d, e=1)` but + # currently `ClassDocumenter` does not apply the docstring signature + # logic when extracting a signature from a __new__ or __init__ method. + '.. py:class:: DocstringSig(*new_args, **new_kwargs)', + ' :module: target', + '', + '', + ' .. py:method:: DocstringSig.__init__(self, a, b=1) -> None', + ' :module: target', + '', + ' First line of docstring', + '', + ' rest of docstring', + '', + '', + ' .. py:method:: DocstringSig.__new__(cls, d, e=1) -> DocstringSig', + ' :module: target', + ' :staticmethod:', + '', + ' First line of docstring', + '', + ' rest of docstring', + '', + '', + ' .. py:method:: DocstringSig.meth(FOO, BAR=1) -> BAZ', + ' :module: target', + '', + ' First line of docstring', + '', + ' rest of docstring', + '', + '', + ' .. py:method:: DocstringSig.meth2()', + ' :module: target', + '', + ' First line, no signature', + ' Second line followed by indentation::', + '', + ' indented line', + '', + '', + ' .. py:property:: DocstringSig.prop1', + ' :module: target', + '', + ' First line of docstring', + '', + '', + ' .. py:property:: DocstringSig.prop2', + ' :module: target', + '', + ' First line of docstring', + ' Second line of docstring', + '', + ] + + # disable autodoc_docstring_signature + app.config.autodoc_docstring_signature = False + actual = do_autodoc(app, 'class', 'target.DocstringSig', options) + assert list(actual) == [ + '', + '.. py:class:: DocstringSig(*new_args, **new_kwargs)', + ' :module: target', + '', + '', + ' .. py:method:: DocstringSig.__init__(*init_args, **init_kwargs)', + ' :module: target', + '', + ' __init__(self, a, b=1) -> None', + ' First line of docstring', + '', + ' rest of docstring', + '', + '', + '', + ' .. py:method:: DocstringSig.__new__(cls, *new_args, **new_kwargs)', + ' :module: target', + ' :staticmethod:', + '', + ' __new__(cls, d, e=1) -> DocstringSig', + ' First line of docstring', + '', + ' rest of docstring', + '', + '', + '', + ' .. py:method:: DocstringSig.meth()', + ' :module: target', + '', + ' meth(FOO, BAR=1) -> BAZ', + ' First line of docstring', + '', + ' rest of docstring', + '', + '', + '', + ' .. py:method:: DocstringSig.meth2()', + ' :module: target', + '', + ' First line, no signature', + ' Second line followed by indentation::', + '', + ' indented line', + '', + '', + ' .. py:property:: DocstringSig.prop1', + ' :module: target', + '', + ' DocstringSig.prop1(self)', + ' First line of docstring', + '', + '', + ' .. py:property:: DocstringSig.prop2', + ' :module: target', + '', + ' First line of docstring', + ' Second line of docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoclass_content_and_docstring_signature_class(app): + app.config.autoclass_content = 'class' + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.docstring_signature', options) + assert list(actual) == [ + '', + '.. py:module:: target.docstring_signature', + '', + '', + '.. py:class:: A(foo, bar)', + ' :module: target.docstring_signature', + '', + '', + '.. 
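test_autodoc_docstring_signature above depends on signatures embedded in docstrings: with autodoc_docstring_signature enabled (the default), a signature on the first docstring line overrides the introspected one, which matters for C extensions and *args wrappers. A hedged sketch of such a member (the upstream fixture is target.DocstringSig):

# hypothetical sketch of a docstring-signature method
class DocstringSig:
    def meth(self, *args, **kwargs):
        """meth(FOO, BAR=1) -> BAZ
        First line of docstring

        rest of docstring
        """
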
py:class:: B(foo, bar)', + ' :module: target.docstring_signature', + '', + '', + '.. py:class:: C(foo, bar)', + ' :module: target.docstring_signature', + '', + '', + '.. py:class:: D()', + ' :module: target.docstring_signature', + '', + '', + '.. py:class:: E()', + ' :module: target.docstring_signature', + '', + '', + '.. py:class:: F()', + ' :module: target.docstring_signature', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoclass_content_and_docstring_signature_init(app): + app.config.autoclass_content = 'init' + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.docstring_signature', options) + assert list(actual) == [ + '', + '.. py:module:: target.docstring_signature', + '', + '', + '.. py:class:: A(foo, bar)', + ' :module: target.docstring_signature', + '', + '', + '.. py:class:: B(foo, bar, baz)', + ' :module: target.docstring_signature', + '', + '', + '.. py:class:: C(foo, bar, baz)', + ' :module: target.docstring_signature', + '', + '', + '.. py:class:: D(foo, bar, baz)', + ' :module: target.docstring_signature', + '', + '', + '.. py:class:: E(foo: int, bar: int, baz: int)', + ' E(foo: str, bar: str, baz: str)', + ' E(foo: float, bar: float, baz: float)', + ' :module: target.docstring_signature', + '', + '', + '.. py:class:: F(foo: int, bar: int, baz: int)', + ' F(foo: str, bar: str, baz: str)', + ' F(foo: float, bar: float, baz: float)', + ' :module: target.docstring_signature', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autoclass_content_and_docstring_signature_both(app): + app.config.autoclass_content = 'both' + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.docstring_signature', options) + assert list(actual) == [ + '', + '.. py:module:: target.docstring_signature', + '', + '', + '.. py:class:: A(foo, bar)', + ' :module: target.docstring_signature', + '', + '', + '.. py:class:: B(foo, bar)', + ' :module: target.docstring_signature', + '', + ' B(foo, bar, baz)', + '', + '', + '.. py:class:: C(foo, bar)', + ' :module: target.docstring_signature', + '', + ' C(foo, bar, baz)', + '', + '', + '.. py:class:: D(foo, bar, baz)', + ' :module: target.docstring_signature', + '', + '', + '.. py:class:: E(foo: int, bar: int, baz: int)', + ' E(foo: str, bar: str, baz: str)', + ' E(foo: float, bar: float, baz: float)', + ' :module: target.docstring_signature', + '', + '', + '.. py:class:: F(foo: int, bar: int, baz: int)', + ' F(foo: str, bar: str, baz: str)', + ' F(foo: float, bar: float, baz: float)', + ' :module: target.docstring_signature', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +@pytest.mark.usefixtures("rollback_sysmodules") +def test_mocked_module_imports(app, warning): + sys.modules.pop('target', None) # unload target module to clear the module cache + + # no autodoc_mock_imports + options = {"members": 'TestAutodoc,decoratedFunction,func,Alias'} + actual = do_autodoc(app, 'module', 'target.need_mocks', options) + assert list(actual) == [] + assert "autodoc: failed to import module 'need_mocks'" in warning.getvalue() + + # with autodoc_mock_imports + app.config.autodoc_mock_imports = [ + 'missing_module', + 'missing_package1', + 'missing_package2', + 'missing_package3', + 'sphinx.missing_module4', + ] + + warning.truncate(0) + actual = do_autodoc(app, 'module', 'target.need_mocks', options) + assert list(actual) == [ + '', + '.. py:module:: target.need_mocks', + '', + '', + '.. 
py:data:: Alias', + ' :module: target.need_mocks', + '', + ' docstring', + '', + '', + '.. py:class:: TestAutodoc()', + ' :module: target.need_mocks', + '', + ' TestAutodoc docstring.', + '', + '', + ' .. py:attribute:: TestAutodoc.Alias', + ' :module: target.need_mocks', + '', + ' docstring', + '', + '', + ' .. py:method:: TestAutodoc.decoratedMethod()', + ' :module: target.need_mocks', + '', + ' TestAutodoc::decoratedMethod docstring', + '', + '', + '.. py:function:: decoratedFunction()', + ' :module: target.need_mocks', + '', + ' decoratedFunction docstring', + '', + '', + '.. py:function:: func(arg: missing_module.Class)', + ' :module: target.need_mocks', + '', + ' a function takes mocked object as an argument', + '', + ] + assert warning.getvalue() == '' + + +@pytest.mark.sphinx('html', testroot='ext-autodoc', + confoverrides={'autodoc_typehints': "signature"}) +def test_autodoc_typehints_signature(app): + if sys.version_info[:2] <= (3, 10): + type_o = "~typing.Any | None" + else: + type_o = "~typing.Any" + + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.typehints', options) + assert list(actual) == [ + '', + '.. py:module:: target.typehints', + '', + '', + '.. py:data:: CONST1', + ' :module: target.typehints', + ' :type: int', + '', + '', + '.. py:data:: CONST2', + ' :module: target.typehints', + ' :type: int', + ' :value: 1', + '', + ' docstring', + '', + '', + '.. py:data:: CONST3', + ' :module: target.typehints', + ' :type: ~pathlib.PurePosixPath', + " :value: PurePosixPath('/a/b/c')", + '', + ' docstring', + '', + '', + '.. py:class:: Math(s: str, o: %s = None)' % type_o, + ' :module: target.typehints', + '', + '', + ' .. py:attribute:: Math.CONST1', + ' :module: target.typehints', + ' :type: int', + '', + '', + ' .. py:attribute:: Math.CONST2', + ' :module: target.typehints', + ' :type: int', + ' :value: 1', + '', + '', + ' .. py:attribute:: Math.CONST3', + ' :module: target.typehints', + ' :type: ~pathlib.PurePosixPath', + " :value: PurePosixPath('/a/b/c')", + '', + '', + ' .. py:method:: Math.decr(a: int, b: int = 1) -> int', + ' :module: target.typehints', + '', + '', + ' .. py:method:: Math.horse(a: str, b: int) -> None', + ' :module: target.typehints', + '', + '', + ' .. py:method:: Math.incr(a: int, b: int = 1) -> int', + ' :module: target.typehints', + '', + '', + ' .. py:method:: Math.nothing() -> None', + ' :module: target.typehints', + '', + '', + ' .. py:property:: Math.path', + ' :module: target.typehints', + ' :type: ~pathlib.PurePosixPath', + '', + '', + ' .. py:property:: Math.prop', + ' :module: target.typehints', + ' :type: int', + '', + '', + '.. py:class:: NewAnnotation(i: int)', + ' :module: target.typehints', + '', + '', + '.. py:class:: NewComment(i: int)', + ' :module: target.typehints', + '', + '', + '.. py:class:: SignatureFromMetaclass(a: int)', + ' :module: target.typehints', + '', + '', + '.. py:class:: T', + ' :module: target.typehints', + '', + ' docstring', + '', + " alias of TypeVar('T', bound=\\ :py:class:`~pathlib.PurePosixPath`)", + '', + '', + '.. py:function:: complex_func(arg1: str, arg2: List[int], arg3: Tuple[int, ' + 'Union[str, Unknown]] = None, *args: str, **kwargs: str) -> None', + ' :module: target.typehints', + '', + '', + '.. py:function:: decr(a: int, b: int = 1) -> int', + ' :module: target.typehints', + '', + '', + '.. py:function:: incr(a: int, b: int = 1) -> int', + ' :module: target.typehints', + '', + '', + '.. 
py:function:: missing_attr(c, a: str, b: Optional[str] = None) -> str', + ' :module: target.typehints', + '', + '', + '.. py:function:: tuple_args(x: tuple[int, int | str]) -> tuple[int, int]', + ' :module: target.typehints', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc', + confoverrides={'autodoc_typehints': "none"}) +def test_autodoc_typehints_none(app): + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.typehints', options) + assert list(actual) == [ + '', + '.. py:module:: target.typehints', + '', + '', + '.. py:data:: CONST1', + ' :module: target.typehints', + '', + '', + '.. py:data:: CONST2', + ' :module: target.typehints', + ' :value: 1', + '', + ' docstring', + '', + '', + '.. py:data:: CONST3', + ' :module: target.typehints', + " :value: PurePosixPath('/a/b/c')", + '', + ' docstring', + '', + '', + '.. py:class:: Math(s, o=None)', + ' :module: target.typehints', + '', + '', + ' .. py:attribute:: Math.CONST1', + ' :module: target.typehints', + '', + '', + ' .. py:attribute:: Math.CONST2', + ' :module: target.typehints', + ' :value: 1', + '', + '', + ' .. py:attribute:: Math.CONST3', + ' :module: target.typehints', + " :value: PurePosixPath('/a/b/c')", + '', + '', + ' .. py:method:: Math.decr(a, b=1)', + ' :module: target.typehints', + '', + '', + ' .. py:method:: Math.horse(a, b)', + ' :module: target.typehints', + '', + '', + ' .. py:method:: Math.incr(a, b=1)', + ' :module: target.typehints', + '', + '', + ' .. py:method:: Math.nothing()', + ' :module: target.typehints', + '', + '', + ' .. py:property:: Math.path', + ' :module: target.typehints', + '', + '', + ' .. py:property:: Math.prop', + ' :module: target.typehints', + '', + '', + '.. py:class:: NewAnnotation(i)', + ' :module: target.typehints', + '', + '', + '.. py:class:: NewComment(i)', + ' :module: target.typehints', + '', + '', + '.. py:class:: SignatureFromMetaclass(a)', + ' :module: target.typehints', + '', + '', + '.. py:class:: T', + ' :module: target.typehints', + '', + ' docstring', + '', + " alias of TypeVar('T', bound=\\ :py:class:`~pathlib.PurePosixPath`)", + '', + '', + '.. py:function:: complex_func(arg1, arg2, arg3=None, *args, **kwargs)', + ' :module: target.typehints', + '', + '', + '.. py:function:: decr(a, b=1)', + ' :module: target.typehints', + '', + '', + '.. py:function:: incr(a, b=1)', + ' :module: target.typehints', + '', + '', + '.. py:function:: missing_attr(c, a, b=None)', + ' :module: target.typehints', + '', + '', + '.. py:function:: tuple_args(x)', + ' :module: target.typehints', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc', + confoverrides={'autodoc_typehints': 'none'}) +def test_autodoc_typehints_none_for_overload(app): + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.overload', options) + assert list(actual) == [ + '', + '.. py:module:: target.overload', + '', + '', + '.. py:class:: Bar(x, y)', + ' :module: target.overload', + '', + ' docstring', + '', + '', + '.. py:class:: Baz(x, y)', + ' :module: target.overload', + '', + ' docstring', + '', + '', + '.. py:class:: Foo(x, y)', + ' :module: target.overload', + '', + ' docstring', + '', + '', + '.. py:class:: Math()', + ' :module: target.overload', + '', + ' docstring', + '', + '', + ' .. py:method:: Math.sum(x, y=None)', + ' :module: target.overload', + '', + ' docstring', + '', + '', + '.. 
py:function:: sum(x, y=None)', + ' :module: target.overload', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('text', testroot='ext-autodoc', + confoverrides={'autodoc_typehints': "description"}, + freshenv=True) +def test_autodoc_typehints_description(app): + app.build() + context = (app.outdir / 'index.txt').read_text(encoding='utf8') + assert ('target.typehints.incr(a, b=1)\n' + '\n' + ' Parameters:\n' + ' * **a** (*int*) --\n' + '\n' + ' * **b** (*int*) --\n' + '\n' + ' Return type:\n' + ' int\n' + in context) + assert ('target.typehints.tuple_args(x)\n' + '\n' + ' Parameters:\n' + ' **x** (*tuple**[**int**, **int** | **str**]*) --\n' + '\n' + ' Return type:\n' + ' tuple[int, int]\n' + in context) + + # Overloads still get displayed in the signature + assert ('target.overload.sum(x: int, y: int = 0) -> int\n' + 'target.overload.sum(x: float, y: float = 0.0) -> float\n' + 'target.overload.sum(x: str, y: str = None) -> str\n' + '\n' + ' docstring\n' + in context) + + +@pytest.mark.sphinx('text', testroot='ext-autodoc', + confoverrides={'autodoc_typehints': "description", + 'autodoc_typehints_description_target': 'documented'}) +def test_autodoc_typehints_description_no_undoc(app): + # No :type: or :rtype: will be injected for `incr`, which does not have + # a description for its parameters or its return. `tuple_args` does + # describe them, so :type: and :rtype: will be added. + with overwrite_file(app.srcdir / 'index.rst', + '.. autofunction:: target.typehints.incr\n' + '\n' + '.. autofunction:: target.typehints.decr\n' + '\n' + ' :returns: decremented number\n' + '\n' + '.. autofunction:: target.typehints.tuple_args\n' + '\n' + ' :param x: arg\n' + ' :return: another tuple\n'): + app.build() + # Restore the original content of the file + context = (app.outdir / 'index.txt').read_text(encoding='utf8') + assert ('target.typehints.incr(a, b=1)\n' + '\n' + 'target.typehints.decr(a, b=1)\n' + '\n' + ' Returns:\n' + ' decremented number\n' + '\n' + ' Return type:\n' + ' int\n' + '\n' + 'target.typehints.tuple_args(x)\n' + '\n' + ' Parameters:\n' + ' **x** (*tuple**[**int**, **int** | **str**]*) -- arg\n' + '\n' + ' Returns:\n' + ' another tuple\n' + '\n' + ' Return type:\n' + ' tuple[int, int]\n' + in context) + + +@pytest.mark.sphinx('text', testroot='ext-autodoc', + confoverrides={'autodoc_typehints': "description", + 'autodoc_typehints_description_target': 'documented_params'}) +def test_autodoc_typehints_description_no_undoc_doc_rtype(app): + # No :type: will be injected for `incr`, which does not have a description + # for its parameters or its return, just :rtype: will be injected due to + # autodoc_typehints_description_target. `tuple_args` does describe both, so + # :type: and :rtype: will be added. `nothing` has no parameters but a return + # type of None, which will be added. + with overwrite_file(app.srcdir / 'index.rst', + '.. autofunction:: target.typehints.incr\n' + '\n' + '.. autofunction:: target.typehints.decr\n' + '\n' + ' :returns: decremented number\n' + '\n' + '.. autofunction:: target.typehints.tuple_args\n' + '\n' + ' :param x: arg\n' + ' :return: another tuple\n' + '\n' + '.. autofunction:: target.typehints.Math.nothing\n' + '\n' + '.. 
autofunction:: target.typehints.Math.horse\n' + '\n' + ' :return: nothing\n'): + app.build() + context = (app.outdir / 'index.txt').read_text(encoding='utf8') + assert context == ( + 'target.typehints.incr(a, b=1)\n' + '\n' + ' Return type:\n' + ' int\n' + '\n' + 'target.typehints.decr(a, b=1)\n' + '\n' + ' Returns:\n' + ' decremented number\n' + '\n' + ' Return type:\n' + ' int\n' + '\n' + 'target.typehints.tuple_args(x)\n' + '\n' + ' Parameters:\n' + ' **x** (*tuple**[**int**, **int** | **str**]*) -- arg\n' + '\n' + ' Returns:\n' + ' another tuple\n' + '\n' + ' Return type:\n' + ' tuple[int, int]\n' + '\n' + 'target.typehints.Math.nothing(self)\n' + '\n' + 'target.typehints.Math.horse(self, a, b)\n' + '\n' + ' Returns:\n' + ' nothing\n' + '\n' + ' Return type:\n' + ' None\n' + ) + + +@pytest.mark.sphinx('text', testroot='ext-autodoc', + confoverrides={'autodoc_typehints': "description"}) +def test_autodoc_typehints_description_with_documented_init(app): + with overwrite_file(app.srcdir / 'index.rst', + '.. autoclass:: target.typehints._ClassWithDocumentedInit\n' + ' :special-members: __init__\n'): + app.build() + context = (app.outdir / 'index.txt').read_text(encoding='utf8') + assert context == ( + 'class target.typehints._ClassWithDocumentedInit(x, *args, **kwargs)\n' + '\n' + ' Class docstring.\n' + '\n' + ' Parameters:\n' + ' * **x** (*int*) --\n' + '\n' + ' * **args** (*int*) --\n' + '\n' + ' * **kwargs** (*int*) --\n' + '\n' + ' __init__(x, *args, **kwargs)\n' + '\n' + ' Init docstring.\n' + '\n' + ' Parameters:\n' + ' * **x** (*int*) -- Some integer\n' + '\n' + ' * **args** (*int*) -- Some integer\n' + '\n' + ' * **kwargs** (*int*) -- Some integer\n' + '\n' + ' Return type:\n' + ' None\n' + ) + + +@pytest.mark.sphinx('text', testroot='ext-autodoc', + confoverrides={'autodoc_typehints': "description", + 'autodoc_typehints_description_target': 'documented'}) +def test_autodoc_typehints_description_with_documented_init_no_undoc(app): + with overwrite_file(app.srcdir / 'index.rst', + '.. autoclass:: target.typehints._ClassWithDocumentedInit\n' + ' :special-members: __init__\n'): + app.build() + context = (app.outdir / 'index.txt').read_text(encoding='utf8') + assert context == ( + 'class target.typehints._ClassWithDocumentedInit(x, *args, **kwargs)\n' + '\n' + ' Class docstring.\n' + '\n' + ' __init__(x, *args, **kwargs)\n' + '\n' + ' Init docstring.\n' + '\n' + ' Parameters:\n' + ' * **x** (*int*) -- Some integer\n' + '\n' + ' * **args** (*int*) -- Some integer\n' + '\n' + ' * **kwargs** (*int*) -- Some integer\n' + ) + + +@pytest.mark.sphinx('text', testroot='ext-autodoc', + confoverrides={'autodoc_typehints': "description", + 'autodoc_typehints_description_target': 'documented_params'}) +def test_autodoc_typehints_description_with_documented_init_no_undoc_doc_rtype(app): + # see test_autodoc_typehints_description_with_documented_init_no_undoc + # returnvalue_and_documented_params should not change class or method + # docstring. + with overwrite_file(app.srcdir / 'index.rst', + '.. 
autoclass:: target.typehints._ClassWithDocumentedInit\n' + ' :special-members: __init__\n'): + app.build() + context = (app.outdir / 'index.txt').read_text(encoding='utf8') + assert context == ( + 'class target.typehints._ClassWithDocumentedInit(x, *args, **kwargs)\n' + '\n' + ' Class docstring.\n' + '\n' + ' __init__(x, *args, **kwargs)\n' + '\n' + ' Init docstring.\n' + '\n' + ' Parameters:\n' + ' * **x** (*int*) -- Some integer\n' + '\n' + ' * **args** (*int*) -- Some integer\n' + '\n' + ' * **kwargs** (*int*) -- Some integer\n' + ) + + +@pytest.mark.sphinx('text', testroot='ext-autodoc', + confoverrides={'autodoc_typehints': "description"}) +def test_autodoc_typehints_description_for_invalid_node(app): + text = ".. py:function:: hello; world" + restructuredtext.parse(app, text) # raises no error + + +@pytest.mark.sphinx('text', testroot='ext-autodoc', + confoverrides={'autodoc_typehints': "both"}) +def test_autodoc_typehints_both(app): + with overwrite_file(app.srcdir / 'index.rst', + '.. autofunction:: target.typehints.incr\n' + '\n' + '.. autofunction:: target.typehints.tuple_args\n' + '\n' + '.. autofunction:: target.overload.sum\n'): + app.build() + context = (app.outdir / 'index.txt').read_text(encoding='utf8') + assert ('target.typehints.incr(a: int, b: int = 1) -> int\n' + '\n' + ' Parameters:\n' + ' * **a** (*int*) --\n' + '\n' + ' * **b** (*int*) --\n' + '\n' + ' Return type:\n' + ' int\n' + in context) + assert ('target.typehints.tuple_args(x: tuple[int, int | str]) -> tuple[int, int]\n' + '\n' + ' Parameters:\n' + ' **x** (*tuple**[**int**, **int** | **str**]*) --\n' + '\n' + ' Return type:\n' + ' tuple[int, int]\n' + in context) + + # Overloads still get displayed in the signature + assert ('target.overload.sum(x: int, y: int = 0) -> int\n' + 'target.overload.sum(x: float, y: float = 0.0) -> float\n' + 'target.overload.sum(x: str, y: str = None) -> str\n' + '\n' + ' docstring\n' + in context) + + +@pytest.mark.sphinx('text', testroot='ext-autodoc') +def test_autodoc_type_aliases(app): + # default + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options) + assert list(actual) == [ + '', + '.. py:module:: target.autodoc_type_aliases', + '', + '', + '.. py:class:: Foo()', + ' :module: target.autodoc_type_aliases', + '', + ' docstring', + '', + '', + ' .. py:attribute:: Foo.attr1', + ' :module: target.autodoc_type_aliases', + ' :type: int', + '', + ' docstring', + '', + '', + ' .. py:attribute:: Foo.attr2', + ' :module: target.autodoc_type_aliases', + ' :type: int', + '', + ' docstring', + '', + '', + '.. py:function:: mult(x: int, y: int) -> int', + ' mult(x: float, y: float) -> float', + ' :module: target.autodoc_type_aliases', + '', + ' docstring', + '', + '', + '.. py:function:: read(r: ~_io.BytesIO) -> ~_io.StringIO', + ' :module: target.autodoc_type_aliases', + '', + ' docstring', + '', + '', + '.. py:function:: sum(x: int, y: int) -> int', + ' :module: target.autodoc_type_aliases', + '', + ' docstring', + '', + '', + '.. py:data:: variable', + ' :module: target.autodoc_type_aliases', + ' :type: int', + '', + ' docstring', + '', + '', + '.. py:data:: variable2', + ' :module: target.autodoc_type_aliases', + ' :type: int', + ' :value: None', + '', + ' docstring', + '', + '', + '.. 
py:data:: variable3', + ' :module: target.autodoc_type_aliases', + ' :type: int | None', + '', + ' docstring', + '', + ] + + # define aliases + app.config.autodoc_type_aliases = {'myint': 'myint', + 'io.StringIO': 'my.module.StringIO'} + actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options) + assert list(actual) == [ + '', + '.. py:module:: target.autodoc_type_aliases', + '', + '', + '.. py:class:: Foo()', + ' :module: target.autodoc_type_aliases', + '', + ' docstring', + '', + '', + ' .. py:attribute:: Foo.attr1', + ' :module: target.autodoc_type_aliases', + ' :type: myint', + '', + ' docstring', + '', + '', + ' .. py:attribute:: Foo.attr2', + ' :module: target.autodoc_type_aliases', + ' :type: myint', + '', + ' docstring', + '', + '', + '.. py:function:: mult(x: myint, y: myint) -> myint', + ' mult(x: float, y: float) -> float', + ' :module: target.autodoc_type_aliases', + '', + ' docstring', + '', + '', + '.. py:function:: read(r: ~_io.BytesIO) -> my.module.StringIO', + ' :module: target.autodoc_type_aliases', + '', + ' docstring', + '', + '', + '.. py:function:: sum(x: myint, y: myint) -> myint', + ' :module: target.autodoc_type_aliases', + '', + ' docstring', + '', + '', + '.. py:data:: variable', + ' :module: target.autodoc_type_aliases', + ' :type: myint', + '', + ' docstring', + '', + '', + '.. py:data:: variable2', + ' :module: target.autodoc_type_aliases', + ' :type: myint', + ' :value: None', + '', + ' docstring', + '', + '', + '.. py:data:: variable3', + ' :module: target.autodoc_type_aliases', + ' :type: myint | None', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('text', testroot='ext-autodoc', + srcdir='autodoc_typehints_description_and_type_aliases', + confoverrides={'autodoc_typehints': "description", + 'autodoc_type_aliases': {'myint': 'myint'}}) +def test_autodoc_typehints_description_and_type_aliases(app): + with overwrite_file(app.srcdir / 'autodoc_type_aliases.rst', + '.. autofunction:: target.autodoc_type_aliases.sum'): + app.build() + context = (app.outdir / 'autodoc_type_aliases.txt').read_text(encoding='utf8') + assert context == ( + 'target.autodoc_type_aliases.sum(x, y)\n' + '\n' + ' docstring\n' + '\n' + ' Parameters:\n' + ' * **x** (*myint*) --\n' + '\n' + ' * **y** (*myint*) --\n' + '\n' + ' Return type:\n' + ' myint\n' + ) + + +@pytest.mark.sphinx('html', testroot='ext-autodoc', + confoverrides={'autodoc_typehints_format': "fully-qualified"}) +def test_autodoc_typehints_format_fully_qualified(app): + if sys.version_info[:2] <= (3, 10): + type_o = "typing.Any | None" + else: + type_o = "typing.Any" + + options = {"members": None, + "undoc-members": None} + actual = do_autodoc(app, 'module', 'target.typehints', options) + assert list(actual) == [ + '', + '.. py:module:: target.typehints', + '', + '', + '.. py:data:: CONST1', + ' :module: target.typehints', + ' :type: int', + '', + '', + '.. py:data:: CONST2', + ' :module: target.typehints', + ' :type: int', + ' :value: 1', + '', + ' docstring', + '', + '', + '.. py:data:: CONST3', + ' :module: target.typehints', + ' :type: pathlib.PurePosixPath', + " :value: PurePosixPath('/a/b/c')", + '', + ' docstring', + '', + '', + '.. py:class:: Math(s: str, o: %s = None)' % type_o, + ' :module: target.typehints', + '', + '', + ' .. py:attribute:: Math.CONST1', + ' :module: target.typehints', + ' :type: int', + '', + '', + ' .. py:attribute:: Math.CONST2', + ' :module: target.typehints', + ' :type: int', + ' :value: 1', + '', + '', + ' .. 
py:attribute:: Math.CONST3', + ' :module: target.typehints', + ' :type: pathlib.PurePosixPath', + " :value: PurePosixPath('/a/b/c')", + '', + '', + ' .. py:method:: Math.decr(a: int, b: int = 1) -> int', + ' :module: target.typehints', + '', + '', + ' .. py:method:: Math.horse(a: str, b: int) -> None', + ' :module: target.typehints', + '', + '', + ' .. py:method:: Math.incr(a: int, b: int = 1) -> int', + ' :module: target.typehints', + '', + '', + ' .. py:method:: Math.nothing() -> None', + ' :module: target.typehints', + '', + '', + ' .. py:property:: Math.path', + ' :module: target.typehints', + ' :type: pathlib.PurePosixPath', + '', + '', + ' .. py:property:: Math.prop', + ' :module: target.typehints', + ' :type: int', + '', + '', + '.. py:class:: NewAnnotation(i: int)', + ' :module: target.typehints', + '', + '', + '.. py:class:: NewComment(i: int)', + ' :module: target.typehints', + '', + '', + '.. py:class:: SignatureFromMetaclass(a: int)', + ' :module: target.typehints', + '', + '', + '.. py:class:: T', + ' :module: target.typehints', + '', + ' docstring', + '', + " alias of TypeVar('T', bound=\\ :py:class:`pathlib.PurePosixPath`)", + '', + '', + '.. py:function:: complex_func(arg1: str, arg2: List[int], arg3: Tuple[int, ' + 'Union[str, Unknown]] = None, *args: str, **kwargs: str) -> None', + ' :module: target.typehints', + '', + '', + '.. py:function:: decr(a: int, b: int = 1) -> int', + ' :module: target.typehints', + '', + '', + '.. py:function:: incr(a: int, b: int = 1) -> int', + ' :module: target.typehints', + '', + '', + '.. py:function:: missing_attr(c, a: str, b: Optional[str] = None) -> str', + ' :module: target.typehints', + '', + '', + '.. py:function:: tuple_args(x: tuple[int, int | str]) -> tuple[int, int]', + ' :module: target.typehints', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc', + confoverrides={'autodoc_typehints_format': "fully-qualified"}) +def test_autodoc_typehints_format_fully_qualified_for_class_alias(app): + actual = do_autodoc(app, 'class', 'target.classes.Alias') + assert list(actual) == [ + '', + '.. py:attribute:: Alias', + ' :module: target.classes', + '', + ' alias of :py:class:`target.classes.Foo`', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc', + confoverrides={'autodoc_typehints_format': "fully-qualified"}) +def test_autodoc_typehints_format_fully_qualified_for_generic_alias(app): + actual = do_autodoc(app, 'data', 'target.genericalias.L') + assert list(actual) == [ + '', + '.. py:data:: L', + ' :module: target.genericalias', + '', + ' A list of Class', + '', + ' alias of :py:class:`~typing.List`\\ [:py:class:`target.genericalias.Class`]', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc', + confoverrides={'autodoc_typehints_format': "fully-qualified"}) +def test_autodoc_typehints_format_fully_qualified_for_newtype_alias(app): + actual = do_autodoc(app, 'class', 'target.typevar.T6') + assert list(actual) == [ + '', + '.. py:class:: T6', + ' :module: target.typevar', + '', + ' T6', + '', + ' alias of :py:class:`datetime.date`', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_default_options(app): + # no settings + actual = do_autodoc(app, 'class', 'target.enums.EnumCls') + assert ' .. py:attribute:: EnumCls.val1' not in actual + assert ' .. py:attribute:: EnumCls.val4' not in actual + actual = do_autodoc(app, 'class', 'target.CustomIter') + assert ' .. py:method:: target.CustomIter' not in actual + actual = do_autodoc(app, 'module', 'target') + assert '.. 
py:function:: function_to_be_imported(app)' not in actual + + # with :members: + app.config.autodoc_default_options = {'members': None} + actual = do_autodoc(app, 'class', 'target.enums.EnumCls') + assert ' .. py:attribute:: EnumCls.val1' in actual + assert ' .. py:attribute:: EnumCls.val4' not in actual + + # with :members: = True + app.config.autodoc_default_options = {'members': None} + actual = do_autodoc(app, 'class', 'target.enums.EnumCls') + assert ' .. py:attribute:: EnumCls.val1' in actual + assert ' .. py:attribute:: EnumCls.val4' not in actual + + # with :members: and :undoc-members: + app.config.autodoc_default_options = { + 'members': None, + 'undoc-members': None, + } + actual = do_autodoc(app, 'class', 'target.enums.EnumCls') + assert ' .. py:attribute:: EnumCls.val1' in actual + assert ' .. py:attribute:: EnumCls.val4' in actual + + # with :special-members: + # Note that :members: must be *on* for :special-members: to work. + app.config.autodoc_default_options = { + 'members': None, + 'special-members': None, + } + actual = do_autodoc(app, 'class', 'target.CustomIter') + assert ' .. py:method:: CustomIter.__init__()' in actual + assert ' Create a new `CustomIter`.' in actual + assert ' .. py:method:: CustomIter.__iter__()' in actual + assert ' Iterate squares of each value.' in actual + if not IS_PYPY: + assert ' .. py:attribute:: CustomIter.__weakref__' in actual + assert ' list of weak references to the object (if defined)' in actual + + # :exclude-members: None - has no effect. Unlike :members:, + # :special-members:, etc. where None == "include all", here None means + # "no/false/off". + app.config.autodoc_default_options = { + 'members': None, + 'exclude-members': None, + } + actual = do_autodoc(app, 'class', 'target.enums.EnumCls') + assert ' .. py:attribute:: EnumCls.val1' in actual + assert ' .. py:attribute:: EnumCls.val4' not in actual + app.config.autodoc_default_options = { + 'members': None, + 'special-members': None, + 'exclude-members': None, + } + actual = do_autodoc(app, 'class', 'target.CustomIter') + assert ' .. py:method:: CustomIter.__init__()' in actual + assert ' Create a new `CustomIter`.' in actual + assert ' .. py:method:: CustomIter.__iter__()' in actual + assert ' Iterate squares of each value.' in actual + if not IS_PYPY: + assert ' .. py:attribute:: CustomIter.__weakref__' in actual + assert ' list of weak references to the object (if defined)' in actual + assert ' .. py:method:: CustomIter.snafucate()' in actual + assert ' Makes this snafucated.' in actual + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_autodoc_default_options_with_values(app): + # with :members: + app.config.autodoc_default_options = {'members': 'val1,val2'} + actual = do_autodoc(app, 'class', 'target.enums.EnumCls') + assert ' .. py:attribute:: EnumCls.val1' in actual + assert ' .. py:attribute:: EnumCls.val2' in actual + assert ' .. py:attribute:: EnumCls.val3' not in actual + assert ' .. py:attribute:: EnumCls.val4' not in actual + + # with :member-order: + app.config.autodoc_default_options = { + 'members': None, + 'member-order': 'bysource', + } + actual = do_autodoc(app, 'class', 'target.Class') + assert list(filter(lambda l: '::' in l, actual)) == [ + '.. py:class:: Class(arg)', + ' .. py:method:: Class.meth()', + ' .. py:method:: Class.skipmeth()', + ' .. py:method:: Class.excludemeth()', + ' .. py:attribute:: Class.attr', + ' .. py:attribute:: Class.docattr', + ' .. py:attribute:: Class.udocattr', + ' .. py:attribute:: Class.mdocattr', + ' .. 
py:method:: Class.moore(a, e, f) -> happiness', + ' .. py:attribute:: Class.inst_attr_inline', + ' .. py:attribute:: Class.inst_attr_comment', + ' .. py:attribute:: Class.inst_attr_string', + ] + + # with :special-members: + app.config.autodoc_default_options = { + 'special-members': '__init__,__iter__', + } + actual = do_autodoc(app, 'class', 'target.CustomIter') + assert ' .. py:method:: CustomIter.__init__()' in actual + assert ' Create a new `CustomIter`.' in actual + assert ' .. py:method:: CustomIter.__iter__()' in actual + assert ' Iterate squares of each value.' in actual + if not IS_PYPY: + assert ' .. py:attribute:: CustomIter.__weakref__' not in actual + assert ' list of weak references to the object (if defined)' not in actual + + # with :exclude-members: + app.config.autodoc_default_options = { + 'members': None, + 'exclude-members': 'val1', + } + actual = do_autodoc(app, 'class', 'target.enums.EnumCls') + assert ' .. py:attribute:: EnumCls.val1' not in actual + assert ' .. py:attribute:: EnumCls.val2' in actual + assert ' .. py:attribute:: EnumCls.val3' in actual + assert ' .. py:attribute:: EnumCls.val4' not in actual + app.config.autodoc_default_options = { + 'members': None, + 'special-members': None, + 'exclude-members': '__weakref__,snafucate', + } + actual = do_autodoc(app, 'class', 'target.CustomIter') + assert ' .. py:method:: CustomIter.__init__()' in actual + assert ' Create a new `CustomIter`.' in actual + assert ' .. py:method:: CustomIter.__iter__()' in actual + assert ' Iterate squares of each value.' in actual + if not IS_PYPY: + assert ' .. py:attribute:: CustomIter.__weakref__' not in actual + assert ' list of weak references to the object (if defined)' not in actual + assert ' .. py:method:: CustomIter.snafucate()' not in actual + assert ' Makes this snafucated.' not in actual diff --git a/tests/test_ext_autodoc_events.py b/tests/test_ext_autodoc_events.py new file mode 100644 index 0000000..d821f4c --- /dev/null +++ b/tests/test_ext_autodoc_events.py @@ -0,0 +1,118 @@ +"""Test the autodoc extension. This tests mainly for autodoc events""" + +import pytest + +from sphinx.ext.autodoc import between, cut_lines + +from .test_ext_autodoc import do_autodoc + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_process_docstring(app): + def on_process_docstring(app, what, name, obj, options, lines): + lines.clear() + lines.append('my docstring') + + app.connect('autodoc-process-docstring', on_process_docstring) + + actual = do_autodoc(app, 'function', 'target.process_docstring.func') + assert list(actual) == [ + '', + '.. py:function:: func()', + ' :module: target.process_docstring', + '', + ' my docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_process_docstring_for_nondatadescriptor(app): + def on_process_docstring(app, what, name, obj, options, lines): + raise + + app.connect('autodoc-process-docstring', on_process_docstring) + + actual = do_autodoc(app, 'attribute', 'target.AttCls.a1') + assert list(actual) == [ + '', + '.. py:attribute:: AttCls.a1', + ' :module: target', + ' :value: hello world', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_cut_lines(app): + app.connect('autodoc-process-docstring', + cut_lines(2, 2, ['function'])) + + actual = do_autodoc(app, 'function', 'target.process_docstring.func') + assert list(actual) == [ + '', + '.. 
py:function:: func()', + ' :module: target.process_docstring', + '', + ' second line', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_between(app): + app.connect('autodoc-process-docstring', + between('---', ['function'])) + + actual = do_autodoc(app, 'function', 'target.process_docstring.func') + assert list(actual) == [ + '', + '.. py:function:: func()', + ' :module: target.process_docstring', + '', + ' second line', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_between_exclude(app): + app.connect('autodoc-process-docstring', + between('---', ['function'], exclude=True)) + + actual = do_autodoc(app, 'function', 'target.process_docstring.func') + assert list(actual) == [ + '', + '.. py:function:: func()', + ' :module: target.process_docstring', + '', + ' first line', + ' third line', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_skip_module_member(app): + def autodoc_skip_member(app, what, name, obj, skip, options): + if name == "Class": + return True # Skip "Class" class in __all__ + elif name == "raises": + return False # Show "raises()" function (not in __all__) + return None + + app.connect('autodoc-skip-member', autodoc_skip_member) + + options = {"members": None} + actual = do_autodoc(app, 'module', 'target', options) + assert list(actual) == [ + '', + '.. py:module:: target', + '', + '', + '.. py:function:: raises(exc, func, *args, **kwds)', + ' :module: target', + '', + ' Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*.', + '', + ] diff --git a/tests/test_ext_autodoc_mock.py b/tests/test_ext_autodoc_mock.py new file mode 100644 index 0000000..3b90693 --- /dev/null +++ b/tests/test_ext_autodoc_mock.py @@ -0,0 +1,152 @@ +"""Test the autodoc extension.""" + +from __future__ import annotations + +import abc +import sys +from importlib import import_module +from typing import TypeVar + +import pytest + +from sphinx.ext.autodoc.mock import _MockModule, _MockObject, ismock, mock, undecorate + + +def test_MockModule(): + mock = _MockModule('mocked_module') + assert isinstance(mock.some_attr, _MockObject) + assert isinstance(mock.some_method, _MockObject) + assert isinstance(mock.attr1.attr2, _MockObject) + assert isinstance(mock.attr1.attr2.meth(), _MockObject) + + assert repr(mock.some_attr) == 'mocked_module.some_attr' + assert repr(mock.some_method) == 'mocked_module.some_method' + assert repr(mock.attr1.attr2) == 'mocked_module.attr1.attr2' + assert repr(mock.attr1.attr2.meth) == 'mocked_module.attr1.attr2.meth' + + assert repr(mock) == 'mocked_module' + + +def test_MockObject(): + mock = _MockObject() + assert isinstance(mock.some_attr, _MockObject) + assert isinstance(mock.some_method, _MockObject) + assert isinstance(mock.attr1.attr2, _MockObject) + assert isinstance(mock.attr1.attr2.meth(), _MockObject) + + # subclassing + class SubClass(mock.SomeClass): + """docstring of SubClass""" + + def method(self): + return "string" + + obj = SubClass() + assert SubClass.__doc__ == "docstring of SubClass" + assert isinstance(obj, SubClass) + assert obj.method() == "string" + assert isinstance(obj.other_method(), SubClass) + + # parametrized type + T = TypeVar('T') + + class SubClass2(mock.SomeClass[T]): + """docstring of SubClass""" + + obj2 = SubClass2() + assert SubClass2.__doc__ == "docstring of SubClass" + assert isinstance(obj2, SubClass2) + + +def test_mock(): + modname = 'sphinx.unknown' + submodule = modname + '.submodule' + assert modname not in sys.modules + with 
pytest.raises(ImportError): + import_module(modname) + + with mock([modname]): + import_module(modname) + assert modname in sys.modules + assert isinstance(sys.modules[modname], _MockModule) + + # submodules are also mocked + import_module(submodule) + assert submodule in sys.modules + assert isinstance(sys.modules[submodule], _MockModule) + + assert modname not in sys.modules + with pytest.raises(ImportError): + import_module(modname) + + +def test_mock_does_not_follow_upper_modules(): + with mock(['sphinx.unknown.module']): # NoQA: SIM117 + with pytest.raises(ImportError): + import_module('sphinx.unknown') + + +def test_abc_MockObject(): + mock = _MockObject() + + class Base: + @abc.abstractmethod + def __init__(self): + pass + + class Derived(Base, mock.SubClass): + pass + + obj = Derived() + assert isinstance(obj, Base) + assert isinstance(obj, _MockObject) + assert isinstance(obj.some_method(), Derived) + + +def test_mock_decorator(): + mock = _MockObject() + + @mock.function_deco + def func(): + pass + + class Foo: + @mock.method_deco + def meth(self): + pass + + @classmethod + @mock.method_deco + def class_meth(cls): + pass + + @mock.class_deco + class Bar: + pass + + @mock.funcion_deco(Foo) + class Baz: + pass + + assert undecorate(func).__name__ == "func" + assert undecorate(Foo.meth).__name__ == "meth" + assert undecorate(Foo.class_meth).__name__ == "class_meth" + assert undecorate(Bar).__name__ == "Bar" + assert undecorate(Baz).__name__ == "Baz" + + +def test_ismock(): + with mock(['sphinx.unknown']): + mod1 = import_module('sphinx.unknown') + mod2 = import_module('sphinx.application') + + class Inherited(mod1.Class): + pass + + assert ismock(mod1) is True + assert ismock(mod1.Class) is True + assert ismock(mod1.submod.Class) is True + assert ismock(Inherited) is False + + assert ismock(mod2) is False + assert ismock(mod2.Sphinx) is False diff --git a/tests/test_ext_autodoc_preserve_defaults.py b/tests/test_ext_autodoc_preserve_defaults.py new file mode 100644 index 0000000..70b6146 --- /dev/null +++ b/tests/test_ext_autodoc_preserve_defaults.py @@ -0,0 +1,192 @@ +"""Test the autodoc extension.""" + +import pytest + +from .test_ext_autodoc import do_autodoc + + +@pytest.mark.sphinx('html', testroot='ext-autodoc', + confoverrides={'autodoc_preserve_defaults': True}) +def test_preserve_defaults(app): + color = "0xFFFFFF" + + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.preserve_defaults', options) + assert list(actual) == [ + '', + '.. py:module:: target.preserve_defaults', + '', + '', + '.. py:class:: Class()', + ' :module: target.preserve_defaults', + '', + ' docstring', + '', + '', + ' .. py:method:: Class.clsmeth(name: str = CONSTANT, sentinel: ~typing.Any = ' + 'SENTINEL, now: ~datetime.datetime = datetime.now(), color: int = %s, *, ' + 'kwarg1, kwarg2=%s) -> None' % (color, color), + ' :module: target.preserve_defaults', + ' :classmethod:', + '', + ' docstring', + '', + '', + ' .. py:method:: Class.meth(name: str = CONSTANT, sentinel: ~typing.Any = ' + 'SENTINEL, now: ~datetime.datetime = datetime.now(), color: int = %s, *, ' + 'kwarg1, kwarg2=%s) -> None' % (color, color), + ' :module: target.preserve_defaults', + '', + ' docstring', + '', + '', + '.. py:class:: MultiLine()', + ' :module: target.preserve_defaults', + '', + ' docstring', + '', + '', + ' .. py:property:: MultiLine.prop1', + ' :module: target.preserve_defaults', + '', + ' docstring', + '', + '', + ' .. 
py:property:: MultiLine.prop2', + ' :module: target.preserve_defaults', + '', + ' docstring', + '', + '', + ' .. py:property:: MultiLine.prop3', + ' :module: target.preserve_defaults', + '', + ' docstring', + '', + '', + ' .. py:property:: MultiLine.prop4', + ' :module: target.preserve_defaults', + '', + ' docstring', + '', + '', + ' .. py:property:: MultiLine.prop5', + ' :module: target.preserve_defaults', + '', + ' docstring', + '', + '', + '.. py:function:: foo(name: str = CONSTANT, sentinel: ~typing.Any = SENTINEL, ' + 'now: ~datetime.datetime = datetime.now(), color: int = %s, *, kwarg1, ' + 'kwarg2=%s) -> None' % (color, color), + ' :module: target.preserve_defaults', + '', + ' docstring', + '', + '', + '.. py:function:: get_sentinel(custom=SENTINEL)', + ' :module: target.preserve_defaults', + '', + ' docstring', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc', + confoverrides={'autodoc_preserve_defaults': True}) +def test_preserve_defaults_special_constructs(app): + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.preserve_defaults_special_constructs', options) + + # * dataclasses.dataclass: + # - __init__ source code is not available + # - default values specified at class level are not discovered + # - values wrapped in a field(...) expression cannot be analyzed + # easily even if annotations were to be parsed + # * typing.NamedTuple: + # - __init__ source code is not available + # - default values specified at class level are not discovered + # * collections.namedtuple: + # - default values are specified as "default=(d1, d2, ...)" + # + # In the future, it might be possible to find some additional default + # values by parsing the source code of the annotations but the task is + # rather complex. + + assert list(actual) == [ + '', + '.. py:module:: target.preserve_defaults_special_constructs', + '', + '', + '.. py:class:: DataClass(' + 'a: int, b: object = <object object>, c: list[int] = <factory>)', + ' :module: target.preserve_defaults_special_constructs', + '', + ' docstring', + '', + '', + '.. py:class:: DataClassNoInit()', + ' :module: target.preserve_defaults_special_constructs', + '', + ' docstring', + '', + '', + '.. py:class:: MyNamedTuple1(' + 'a: int, b: object = <object object>, c: list[int] = [1, 2, 3])', + ' :module: target.preserve_defaults_special_constructs', + '', + ' docstring', + '', + '', + ' .. py:attribute:: MyNamedTuple1.a', + ' :module: target.preserve_defaults_special_constructs', + ' :type: int', + '', + ' Alias for field number 0', + '', + '', + ' .. py:attribute:: MyNamedTuple1.b', + ' :module: target.preserve_defaults_special_constructs', + ' :type: object', + '', + ' Alias for field number 1', + '', + '', + ' .. py:attribute:: MyNamedTuple1.c', + ' :module: target.preserve_defaults_special_constructs', + ' :type: list[int]', + '', + ' Alias for field number 2', + '', + '', + '.. py:class:: MyNamedTuple2(a=0, b=<object object>)', + ' :module: target.preserve_defaults_special_constructs', + '', + ' docstring', + '', + '', + '.. py:class:: MyTypedDict', + ' :module: target.preserve_defaults_special_constructs', + '', + ' docstring', + '', + '', + '.. py:data:: SENTINEL', + ' :module: target.preserve_defaults_special_constructs', + ' :value: <object object>', + '', + ' docstring', + '', + '', + '.. py:function:: foo(x, y, z=SENTINEL)', + ' :module: target.preserve_defaults_special_constructs', + '', + ' docstring', + '', + '', + '.. 
py:function:: ze_lambda(z=SENTINEL)', + ' :module: target.preserve_defaults_special_constructs', + '', + ' docstring', + '', + ] diff --git a/tests/test_ext_autodoc_private_members.py b/tests/test_ext_autodoc_private_members.py new file mode 100644 index 0000000..bf707bf --- /dev/null +++ b/tests/test_ext_autodoc_private_members.py @@ -0,0 +1,158 @@ +"""Test the autodoc extension. This tests mainly for private-members option. +""" + +import pytest + +from .test_ext_autodoc import do_autodoc + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_private_field(app): + app.config.autoclass_content = 'class' + options = {"members": None} + actual = do_autodoc(app, 'module', 'target.private', options) + assert list(actual) == [ + '', + '.. py:module:: target.private', + '', + '', + '.. py:data:: _PUBLIC_CONSTANT', + ' :module: target.private', + ' :value: None', + '', + ' :meta public:', + '', + '', + '.. py:function:: _public_function(name)', + ' :module: target.private', + '', + ' public_function is a docstring().', + '', + ' :meta public:', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_private_field_and_private_members(app): + app.config.autoclass_content = 'class' + options = {"members": None, + "private-members": None} + actual = do_autodoc(app, 'module', 'target.private', options) + assert list(actual) == [ + '', + '.. py:module:: target.private', + '', + '', + '.. py:data:: PRIVATE_CONSTANT', + ' :module: target.private', + ' :value: None', + '', + ' :meta private:', + '', + '', + '.. py:data:: _PUBLIC_CONSTANT', + ' :module: target.private', + ' :value: None', + '', + ' :meta public:', + '', + '', + '.. py:function:: _public_function(name)', + ' :module: target.private', + '', + ' public_function is a docstring().', + '', + ' :meta public:', + '', + '', + '.. py:function:: private_function(name)', + ' :module: target.private', + '', + ' private_function is a docstring().', + '', + ' :meta private:', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_private_members(app): + app.config.autoclass_content = 'class' + options = {"members": None, + "private-members": "_PUBLIC_CONSTANT,_public_function"} + actual = do_autodoc(app, 'module', 'target.private', options) + assert list(actual) == [ + '', + '.. py:module:: target.private', + '', + '', + '.. py:data:: _PUBLIC_CONSTANT', + ' :module: target.private', + ' :value: None', + '', + ' :meta public:', + '', + '', + '.. py:function:: _public_function(name)', + ' :module: target.private', + '', + ' public_function is a docstring().', + '', + ' :meta public:', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_private_attributes(app): + app.config.autoclass_content = 'class' + options = {"members": None} + actual = do_autodoc(app, 'class', 'target.private.Foo', options) + assert list(actual) == [ + '', + '.. py:class:: Foo()', + ' :module: target.private', + '', + '', + ' .. py:attribute:: Foo._public_attribute', + ' :module: target.private', + ' :value: 47', + '', + ' A public class attribute whose name starts with an underscore.', + '', + ' :meta public:', + '', + ] + + +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_private_attributes_and_private_members(app): + app.config.autoclass_content = 'class' + options = {"members": None, + "private-members": None} + actual = do_autodoc(app, 'class', 'target.private.Foo', options) + assert list(actual) == [ + '', + '.. py:class:: Foo()', + ' :module: target.private', + '', + '', + ' .. 
py:attribute:: Foo._public_attribute', + ' :module: target.private', + ' :value: 47', + '', + ' A public class attribute whose name starts with an underscore.', + '', + ' :meta public:', + '', + '', + ' .. py:attribute:: Foo.private_attribute', + ' :module: target.private', + ' :value: 11', + '', + ' A private class attribute whose name does not start with an underscore.', + '', + ' :meta private:', + '', + ] diff --git a/tests/test_ext_autosectionlabel.py b/tests/test_ext_autosectionlabel.py new file mode 100644 index 0000000..f99a6d3 --- /dev/null +++ b/tests/test_ext_autosectionlabel.py @@ -0,0 +1,77 @@ +"""Test sphinx.ext.autosectionlabel extension.""" + +import re + +import pytest + + +@pytest.mark.sphinx('html', testroot='ext-autosectionlabel') +def test_autosectionlabel_html(app, status, warning, skipped_labels=False): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + html = ('<li><p><a class="reference internal" href="#introduce-of-sphinx">' + '<span class=".*?">Introduce of Sphinx</span></a></p></li>') + assert re.search(html, content, re.S) + + html = ('<li><p><a class="reference internal" href="#installation">' + '<span class="std std-ref">Installation</span></a></p></li>') + assert re.search(html, content, re.S) + + html = ('<li><p><a class="reference internal" href="#for-windows-users">' + '<span class="std std-ref">For Windows users</span></a></p></li>') + assert re.search(html, content, re.S) + + html = ('<li><p><a class="reference internal" href="#for-unix-users">' + '<span class="std std-ref">For UNIX users</span></a></p></li>') + assert re.search(html, content, re.S) + + html = ('<li><p><a class="reference internal" href="#linux">' + '<span class="std std-ref">Linux</span></a></p></li>') + assert re.search(html, content, re.S) + + html = ('<li><p><a class="reference internal" href="#freebsd">' + '<span class="std std-ref">FreeBSD</span></a></p></li>') + assert re.search(html, content, re.S) + + # for smart_quotes (refs: #4027) + html = ('<li><p><a class="reference internal" ' + 'href="#this-one-s-got-an-apostrophe">' + '<span class="std std-ref">This one’s got an apostrophe' + '</span></a></p></li>') + assert re.search(html, content, re.S) + + +# Re-use test definition from above, just change the test root directory +@pytest.mark.sphinx('html', testroot='ext-autosectionlabel-prefix-document') +def test_autosectionlabel_prefix_document_html(app, status, warning): + test_autosectionlabel_html(app, status, warning) + + +@pytest.mark.sphinx('html', testroot='ext-autosectionlabel', + confoverrides={'autosectionlabel_maxdepth': 3}) +def test_autosectionlabel_maxdepth(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + + # depth: 1 + html = ('<li><p><a class="reference internal" href="#test-ext-autosectionlabel">' + '<span class=".*?">test-ext-autosectionlabel</span></a></p></li>') + assert re.search(html, content, re.S) + + # depth: 2 + html = ('<li><p><a class="reference internal" href="#installation">' + '<span class="std std-ref">Installation</span></a></p></li>') + assert re.search(html, content, re.S) + + # depth: 3 + html = ('<li><p><a class="reference internal" href="#for-windows-users">' + '<span class="std std-ref">For Windows users</span></a></p></li>') + assert re.search(html, content, re.S) + + # depth: 4 + html = '<li><p><span class="xref std std-ref">Linux</span></p></li>' + assert re.search(html, content, re.S) + + assert "WARNING: undefined label: 
'linux'" in warning.getvalue() diff --git a/tests/test_ext_autosummary.py b/tests/test_ext_autosummary.py new file mode 100644 index 0000000..43f3ae0 --- /dev/null +++ b/tests/test_ext_autosummary.py @@ -0,0 +1,686 @@ +"""Test the autosummary extension.""" + +import sys +from io import StringIO +from pathlib import Path +from unittest.mock import Mock, patch + +import pytest +from docutils import nodes + +from sphinx import addnodes +from sphinx.ext.autosummary import ( + autosummary_table, + autosummary_toc, + extract_summary, + import_by_name, + mangle_signature, +) +from sphinx.ext.autosummary.generate import ( + AutosummaryEntry, + generate_autosummary_content, + generate_autosummary_docs, +) +from sphinx.ext.autosummary.generate import main as autogen_main +from sphinx.testing.util import assert_node, etree_parse +from sphinx.util.docutils import new_document + +try: + from contextlib import chdir +except ImportError: + from sphinx.util.osutil import _chdir as chdir + +html_warnfile = StringIO() + + +default_kw = { + 'testroot': 'autosummary', + 'confoverrides': { + 'extensions': ['sphinx.ext.autosummary'], + 'autosummary_generate': True, + 'autosummary_generate_overwrite': False, + 'source_suffix': '.rst', + }, +} + + +@pytest.fixture(autouse=True) +def _unload_target_module(): + sys.modules.pop('target', None) + + +def test_mangle_signature(): + TEST = """ + () :: () + (a, b, c, d, e) :: (a, b, c, d, e) + (a, b, c=1, d=2, e=3) :: (a, b[, c, d, e]) + (a, b, aaa=1, bbb=1, ccc=1, eee=1, fff=1, ggg=1, hhh=1, iii=1, jjj=1)\ + :: (a, b[, aaa, bbb, ccc, ...]) + (a, b, c=(), d=<foo>) :: (a, b[, c, d]) + (a, b, c='foobar()', d=123) :: (a, b[, c, d]) + (a, b[, c]) :: (a, b[, c]) + (a, b[, cxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx]) :: (a, b[, ...) + (a, b='c=d, e=f, g=h', c=3) :: (a[, b, c]) + (a, b="c=d, e=f, g=h", c=3) :: (a[, b, c]) + (a, b='c=d, \\'e=f,\\' g=h', c=3) :: (a[, b, c]) + (a, b='c=d, ', e='\\\\' g=h, c=3) :: (a[, b, e, c]) + (a, b={'c=d, ': 3, '\\\\': 3}) :: (a[, b]) + (a=1, b=2, c=3) :: ([a, b, c]) + (a=1, b=<SomeClass: a, b, c>, c=3) :: ([a, b, c]) + (a=1, b=T(a=1, b=2), c=3) :: ([a, b, c]) + (a: Tuple[int, str], b: int) -> str :: (a, b) + """ + + TEST = [[y.strip() for y in x.split("::")] for x in TEST.split("\n") + if '::' in x] + for inp, outp in TEST: + res = mangle_signature(inp).strip().replace("\u00a0", " ") + assert res == outp, (f"'{inp}' -> '{res}' != '{outp}'") + + +def test_extract_summary(capsys): + settings = Mock(language_code='en', + id_prefix='', + auto_id_prefix='', + pep_reference=False, + rfc_reference=False) + document = new_document('', settings) + + # normal case + doc = ['', + 'This is a first sentence. And second one.', + '', + 'Second block is here'] + assert extract_summary(doc, document) == 'This is a first sentence.' + + # inliner case + doc = ['This sentence contains *emphasis text having dots.*,', + 'it does not break sentence.'] + assert extract_summary(doc, document) == ' '.join(doc) + + # abbreviations + doc = ['Blabla, i.e. bla.'] + assert extract_summary(doc, document) == ' '.join(doc) + + doc = ['Blabla, (i.e. bla).'] + assert extract_summary(doc, document) == ' '.join(doc) + + doc = ['Blabla, e.g. bla.'] + assert extract_summary(doc, document) == ' '.join(doc) + + doc = ['Blabla, (e.g. bla).'] + assert extract_summary(doc, document) == ' '.join(doc) + + doc = ['Blabla, et al. bla.'] + assert extract_summary(doc, document) == ' '.join(doc) + + # literal + doc = ['blah blah::'] + assert extract_summary(doc, document) == 'blah blah.' 
+
+    # heading
+    doc = ['blah blah',
+           '=========']
+    assert extract_summary(doc, document) == 'blah blah'
+
+    doc = ['=========',
+           'blah blah',
+           '=========']
+    assert extract_summary(doc, document) == 'blah blah'
+
+    # hyperlink target
+    doc = ['Do `this <https://www.sphinx-doc.org/>`_ and that. '
+           'blah blah blah.']
+    assert (extract_summary(doc, document) ==
+            'Do `this <https://www.sphinx-doc.org/>`_ and that.')
+
+    _, err = capsys.readouterr()
+    assert err == ''
+
+
+@pytest.mark.sphinx('dummy', **default_kw)
+def test_get_items_summary(make_app, app_params):
+    import sphinx.ext.autosummary
+    import sphinx.ext.autosummary.generate
+    args, kwargs = app_params
+    app = make_app(*args, **kwargs)
+    sphinx.ext.autosummary.generate.setup_documenters(app)
+    # monkey-patch Autosummary.get_items so we can easily get access to it's
+    # results..
+    orig_get_items = sphinx.ext.autosummary.Autosummary.get_items
+
+    autosummary_items = {}
+
+    def new_get_items(self, names, *args, **kwargs):
+        results = orig_get_items(self, names, *args, **kwargs)
+        for name, result in zip(names, results):
+            autosummary_items[name] = result
+        return results
+
+    def handler(app, what, name, obj, options, lines):
+        assert isinstance(lines, list)
+
+        # ensure no docstring is processed twice:
+        assert 'THIS HAS BEEN HANDLED' not in lines
+        lines.append('THIS HAS BEEN HANDLED')
+    app.connect('autodoc-process-docstring', handler)
+
+    sphinx.ext.autosummary.Autosummary.get_items = new_get_items
+    try:
+        app.builder.build_all()
+    finally:
+        sphinx.ext.autosummary.Autosummary.get_items = orig_get_items
+
+    html_warnings = app._warning.getvalue()
+    assert html_warnings == ''
+
+    expected_values = {
+        'withSentence': 'I have a sentence which spans multiple lines.',
+        'noSentence': "this doesn't start with a capital.",
+        'emptyLine': "This is the real summary",
+        'module_attr': 'This is a module attribute',
+        'C.class_attr': 'This is a class attribute',
+        'C.instance_attr': 'This is an instance attribute',
+        'C.prop_attr1': 'This is a function docstring',
+        'C.prop_attr2': 'This is a attribute docstring',
+        'C.C2': 'This is a nested inner class docstring',
+    }
+    for key, expected in expected_values.items():
+        assert autosummary_items[key][2] == expected, 'Summary for %s was %r -'\
+            ' expected %r' % (key, autosummary_items[key], expected)
+
+    # check an item in detail
+    assert 'func' in autosummary_items
+    func_attrs = ('func',
+                  '(arg_, *args, **kwargs)',
+                  'Test function take an argument ended with underscore.',
+                  'dummy_module.func')
+    assert autosummary_items['func'] == func_attrs
+
+
+def str_content(elem):
+    if elem.text is not None:
+        return elem.text
+    else:
+        return ''.join(str_content(e) for e in elem)
+
+
+@pytest.mark.sphinx('xml', **default_kw)
+def test_escaping(app, status, warning):
+    app.builder.build_all()
+
+    outdir = Path(app.builder.outdir)
+
+    docpage = outdir / 'underscore_module_.xml'
+    assert docpage.exists()
+
+    title = etree_parse(docpage).find('section/title')
+
+    assert str_content(title) == 'underscore_module_'
+
+
+@pytest.mark.sphinx(testroot='ext-autosummary')
+def test_autosummary_generate_content_for_module(app):
+    import autosummary_dummy_module
+    template = Mock()
+
+    generate_autosummary_content('autosummary_dummy_module', autosummary_dummy_module, None,
+                                 template, None, False, app, False, {})
+    assert template.render.call_args[0][0] == 'module'
+
+    context = template.render.call_args[0][1]
+    assert context['members'] == ['CONSTANT1', 'CONSTANT2', 'Exc', 'Foo', '_Baz', '_Exc',
'__all__', '__builtins__', '__cached__', '__doc__', + '__file__', '__name__', '__package__', '_quux', 'bar', + 'non_imported_member', 'quuz', 'qux'] + assert context['functions'] == ['bar'] + assert context['all_functions'] == ['_quux', 'bar'] + assert context['classes'] == ['Foo'] + assert context['all_classes'] == ['Foo', '_Baz'] + assert context['exceptions'] == ['Exc'] + assert context['all_exceptions'] == ['Exc', '_Exc'] + assert context['attributes'] == ['CONSTANT1', 'qux', 'quuz', 'non_imported_member'] + assert context['all_attributes'] == ['CONSTANT1', 'qux', 'quuz', 'non_imported_member'] + assert context['fullname'] == 'autosummary_dummy_module' + assert context['module'] == 'autosummary_dummy_module' + assert context['objname'] == '' + assert context['name'] == '' + assert context['objtype'] == 'module' + + +@pytest.mark.sphinx(testroot='ext-autosummary') +def test_autosummary_generate_content_for_module___all__(app): + import autosummary_dummy_module + template = Mock() + app.config.autosummary_ignore_module_all = False + + generate_autosummary_content('autosummary_dummy_module', autosummary_dummy_module, None, + template, None, False, app, False, {}) + assert template.render.call_args[0][0] == 'module' + + context = template.render.call_args[0][1] + assert context['members'] == ['CONSTANT1', 'Exc', 'Foo', '_Baz', 'bar', 'qux', 'path'] + assert context['functions'] == ['bar'] + assert context['all_functions'] == ['bar'] + assert context['classes'] == ['Foo'] + assert context['all_classes'] == ['Foo', '_Baz'] + assert context['exceptions'] == ['Exc'] + assert context['all_exceptions'] == ['Exc'] + assert context['attributes'] == ['CONSTANT1', 'qux'] + assert context['all_attributes'] == ['CONSTANT1', 'qux'] + assert context['fullname'] == 'autosummary_dummy_module' + assert context['module'] == 'autosummary_dummy_module' + assert context['objname'] == '' + assert context['name'] == '' + assert context['objtype'] == 'module' + + +@pytest.mark.sphinx(testroot='ext-autosummary') +def test_autosummary_generate_content_for_module_skipped(app): + import autosummary_dummy_module + template = Mock() + + def skip_member(app, what, name, obj, skip, options): + if name in ('Foo', 'bar', 'Exc'): + return True + return None + + app.connect('autodoc-skip-member', skip_member) + generate_autosummary_content('autosummary_dummy_module', autosummary_dummy_module, None, + template, None, False, app, False, {}) + context = template.render.call_args[0][1] + assert context['members'] == ['CONSTANT1', 'CONSTANT2', '_Baz', '_Exc', '__all__', + '__builtins__', '__cached__', '__doc__', '__file__', + '__name__', '__package__', '_quux', 'non_imported_member', + 'quuz', 'qux'] + assert context['functions'] == [] + assert context['classes'] == [] + assert context['exceptions'] == [] + + +@pytest.mark.sphinx(testroot='ext-autosummary') +def test_autosummary_generate_content_for_module_imported_members(app): + import autosummary_dummy_module + template = Mock() + + generate_autosummary_content('autosummary_dummy_module', autosummary_dummy_module, None, + template, None, True, app, False, {}) + assert template.render.call_args[0][0] == 'module' + + context = template.render.call_args[0][1] + assert context['members'] == ['CONSTANT1', 'CONSTANT2', 'Class', 'Exc', 'Foo', 'Union', + '_Baz', '_Exc', '__all__', '__builtins__', '__cached__', + '__doc__', '__file__', '__loader__', '__name__', + '__package__', '__spec__', '_quux', 'bar', + 'considered_as_imported', 'non_imported_member', 'path', + 'quuz', 'qux'] + 
assert context['functions'] == ['bar'] + assert context['all_functions'] == ['_quux', 'bar'] + assert context['classes'] == ['Class', 'Foo'] + assert context['all_classes'] == ['Class', 'Foo', '_Baz'] + assert context['exceptions'] == ['Exc'] + assert context['all_exceptions'] == ['Exc', '_Exc'] + assert context['attributes'] == ['CONSTANT1', 'qux', 'quuz', 'non_imported_member'] + assert context['all_attributes'] == ['CONSTANT1', 'qux', 'quuz', 'non_imported_member'] + assert context['fullname'] == 'autosummary_dummy_module' + assert context['module'] == 'autosummary_dummy_module' + assert context['objname'] == '' + assert context['name'] == '' + assert context['objtype'] == 'module' + + +@pytest.mark.sphinx(testroot='ext-autosummary') +def test_autosummary_generate_content_for_module_imported_members_inherited_module(app): + import autosummary_dummy_inherited_module + template = Mock() + + generate_autosummary_content('autosummary_dummy_inherited_module', + autosummary_dummy_inherited_module, None, + template, None, True, app, False, {}) + assert template.render.call_args[0][0] == 'module' + + context = template.render.call_args[0][1] + assert context['members'] == ['Foo', 'InheritedAttrClass', '__all__', '__builtins__', '__cached__', + '__doc__', '__file__', '__loader__', '__name__', + '__package__', '__spec__'] + assert context['functions'] == [] + assert context['classes'] == ['Foo', 'InheritedAttrClass'] + assert context['exceptions'] == [] + assert context['all_exceptions'] == [] + assert context['attributes'] == [] + assert context['all_attributes'] == [] + assert context['fullname'] == 'autosummary_dummy_inherited_module' + assert context['module'] == 'autosummary_dummy_inherited_module' + assert context['objname'] == '' + assert context['name'] == '' + assert context['objtype'] == 'module' + + +@pytest.mark.sphinx('dummy', testroot='ext-autosummary') +def test_autosummary_generate(app, status, warning): + app.builder.build_all() + + doctree = app.env.get_doctree('index') + assert_node(doctree, (nodes.paragraph, + nodes.paragraph, + addnodes.tabular_col_spec, + autosummary_table, + [autosummary_toc, addnodes.toctree])) + assert_node(doctree[3], + [autosummary_table, nodes.table, nodes.tgroup, (nodes.colspec, + nodes.colspec, + [nodes.tbody, (nodes.row, + nodes.row, + nodes.row, + nodes.row, + nodes.row, + nodes.row, + nodes.row, + nodes.row)])]) + assert_node(doctree[4][0], addnodes.toctree, caption="An autosummary") + + assert len(doctree[3][0][0][2]) == 8 + assert doctree[3][0][0][2][0].astext() == 'autosummary_dummy_module\n\n' + assert doctree[3][0][0][2][1].astext() == 'autosummary_dummy_module.Foo()\n\n' + assert doctree[3][0][0][2][2].astext() == 'autosummary_dummy_module.Foo.Bar()\n\n' + assert doctree[3][0][0][2][3].astext() == 'autosummary_dummy_module.Foo.value\n\ndocstring' + assert doctree[3][0][0][2][4].astext() == 'autosummary_dummy_module.bar(x[, y])\n\n' + assert doctree[3][0][0][2][5].astext() == 'autosummary_dummy_module.qux\n\na module-level attribute' + assert doctree[3][0][0][2][6].astext() == 'autosummary_dummy_inherited_module.InheritedAttrClass()\n\n' + assert doctree[3][0][0][2][7].astext() == 'autosummary_dummy_inherited_module.InheritedAttrClass.subclassattr\n\nother docstring' + + module = (app.srcdir / 'generated' / 'autosummary_dummy_module.rst').read_text(encoding='utf8') + + assert (' .. autosummary::\n' + ' \n' + ' Foo\n' + ' \n' in module) + assert (' .. 
autosummary::\n' + ' \n' + ' CONSTANT1\n' + ' qux\n' + ' quuz\n' + ' non_imported_member\n' + ' \n' in module) + + Foo = (app.srcdir / 'generated' / 'autosummary_dummy_module.Foo.rst').read_text(encoding='utf8') + assert '.. automethod:: __init__' in Foo + assert (' .. autosummary::\n' + ' \n' + ' ~Foo.__init__\n' + ' ~Foo.bar\n' + ' \n' in Foo) + assert (' .. autosummary::\n' + ' \n' + ' ~Foo.CONSTANT3\n' + ' ~Foo.CONSTANT4\n' + ' ~Foo.baz\n' + ' ~Foo.value\n' + ' \n' in Foo) + + FooBar = (app.srcdir / 'generated' / 'autosummary_dummy_module.Foo.Bar.rst').read_text(encoding='utf8') + assert ('.. currentmodule:: autosummary_dummy_module\n' + '\n' + '.. autoclass:: Foo.Bar\n' in FooBar) + + Foo_value = (app.srcdir / 'generated' / 'autosummary_dummy_module.Foo.value.rst').read_text(encoding='utf8') + assert ('.. currentmodule:: autosummary_dummy_module\n' + '\n' + '.. autoattribute:: Foo.value' in Foo_value) + + qux = (app.srcdir / 'generated' / 'autosummary_dummy_module.qux.rst').read_text(encoding='utf8') + assert ('.. currentmodule:: autosummary_dummy_module\n' + '\n' + '.. autodata:: qux' in qux) + + InheritedAttrClass = (app.srcdir / 'generated' / 'autosummary_dummy_inherited_module.InheritedAttrClass.rst').read_text(encoding='utf8') + print(InheritedAttrClass) + assert '.. automethod:: __init__' in Foo + assert (' .. autosummary::\n' + ' \n' + ' ~InheritedAttrClass.__init__\n' + ' ~InheritedAttrClass.bar\n' + ' \n' in InheritedAttrClass) + assert (' .. autosummary::\n' + ' \n' + ' ~InheritedAttrClass.CONSTANT3\n' + ' ~InheritedAttrClass.CONSTANT4\n' + ' ~InheritedAttrClass.baz\n' + ' ~InheritedAttrClass.subclassattr\n' + ' ~InheritedAttrClass.value\n' + ' \n' in InheritedAttrClass) + + InheritedAttrClass_subclassattr = (app.srcdir / 'generated' / 'autosummary_dummy_inherited_module.InheritedAttrClass.subclassattr.rst').read_text(encoding='utf8') + assert ('.. currentmodule:: autosummary_dummy_inherited_module\n' + '\n' + '.. 
autoattribute:: InheritedAttrClass.subclassattr' in InheritedAttrClass_subclassattr) + + +@pytest.mark.sphinx('dummy', testroot='ext-autosummary', + confoverrides={'autosummary_generate_overwrite': False}) +def test_autosummary_generate_overwrite1(app_params, make_app): + args, kwargs = app_params + srcdir = kwargs.get('srcdir') + + (srcdir / 'generated').mkdir(parents=True, exist_ok=True) + (srcdir / 'generated' / 'autosummary_dummy_module.rst').write_text('', encoding='utf8') + + app = make_app(*args, **kwargs) + content = (srcdir / 'generated' / 'autosummary_dummy_module.rst').read_text(encoding='utf8') + assert content == '' + assert 'autosummary_dummy_module.rst' not in app._warning.getvalue() + + +@pytest.mark.sphinx('dummy', testroot='ext-autosummary', + confoverrides={'autosummary_generate_overwrite': True}) +def test_autosummary_generate_overwrite2(app_params, make_app): + args, kwargs = app_params + srcdir = kwargs.get('srcdir') + + (srcdir / 'generated').mkdir(parents=True, exist_ok=True) + (srcdir / 'generated' / 'autosummary_dummy_module.rst').write_text('', encoding='utf8') + + app = make_app(*args, **kwargs) + content = (srcdir / 'generated' / 'autosummary_dummy_module.rst').read_text(encoding='utf8') + assert content != '' + assert 'autosummary_dummy_module.rst' not in app._warning.getvalue() + + +@pytest.mark.sphinx('dummy', testroot='ext-autosummary-recursive') +@pytest.mark.usefixtures("rollback_sysmodules") +def test_autosummary_recursive(app, status, warning): + sys.modules.pop('package', None) # unload target module to clear the module cache + + app.build() + + # autosummary having :recursive: option + assert (app.srcdir / 'generated' / 'package.rst').exists() + assert (app.srcdir / 'generated' / 'package.module.rst').exists() + assert (app.srcdir / 'generated' / 'package.module_importfail.rst').exists() is False + assert (app.srcdir / 'generated' / 'package.package.rst').exists() + assert (app.srcdir / 'generated' / 'package.package.module.rst').exists() + + # autosummary not having :recursive: option + assert (app.srcdir / 'generated' / 'package2.rst').exists() + assert (app.srcdir / 'generated' / 'package2.module.rst').exists() is False + + # Check content of recursively generated stub-files + content = (app.srcdir / 'generated' / 'package.rst').read_text(encoding='utf8') + assert 'package.module' in content + assert 'package.package' in content + assert 'package.module_importfail' in content + + content = (app.srcdir / 'generated' / 'package.package.rst').read_text(encoding='utf8') + assert 'package.package.module' in content + + +@pytest.mark.sphinx('dummy', testroot='ext-autosummary-recursive', + srcdir='test_autosummary_recursive_skips_mocked_modules', + confoverrides={'autosummary_mock_imports': ['package.package']}) +@pytest.mark.usefixtures("rollback_sysmodules") +def test_autosummary_recursive_skips_mocked_modules(app, status, warning): + sys.modules.pop('package', None) # unload target module to clear the module cache + app.build() + + assert (app.srcdir / 'generated' / 'package.rst').exists() + assert (app.srcdir / 'generated' / 'package.module.rst').exists() + assert (app.srcdir / 'generated' / 'package.package.rst').exists() is False + assert (app.srcdir / 'generated' / 'package.package.module.rst').exists() is False + + +@pytest.mark.sphinx('dummy', testroot='ext-autosummary-filename-map') +def test_autosummary_filename_map(app, status, warning): + app.build() + + assert (app.srcdir / 'generated' / 'module_mangled.rst').exists() + assert not 
(app.srcdir / 'generated' / 'autosummary_dummy_module.rst').exists() + assert (app.srcdir / 'generated' / 'bar.rst').exists() + assert not (app.srcdir / 'generated' / 'autosummary_dummy_module.bar.rst').exists() + assert (app.srcdir / 'generated' / 'autosummary_dummy_module.Foo.rst').exists() + + html_warnings = app._warning.getvalue() + assert html_warnings == '' + + +@pytest.mark.sphinx('latex', **default_kw) +def test_autosummary_latex_table_colspec(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + print(status.getvalue()) + print(warning.getvalue()) + assert r'\begin{longtable}{\X{1}{2}\X{1}{2}}' in result + assert r'p{0.5\linewidth}' not in result + + +def test_import_by_name(): + import sphinx + import sphinx.ext.autosummary + + prefixed_name, obj, parent, modname = import_by_name('sphinx') + assert prefixed_name == 'sphinx' + assert obj is sphinx + assert parent is None + assert modname == 'sphinx' + + prefixed_name, obj, parent, modname = import_by_name('sphinx.ext.autosummary.__name__') + assert prefixed_name == 'sphinx.ext.autosummary.__name__' + assert obj is sphinx.ext.autosummary.__name__ + assert parent is sphinx.ext.autosummary + assert modname == 'sphinx.ext.autosummary' + + prefixed_name, obj, parent, modname = \ + import_by_name('sphinx.ext.autosummary.Autosummary.get_items') + assert prefixed_name == 'sphinx.ext.autosummary.Autosummary.get_items' + assert obj == sphinx.ext.autosummary.Autosummary.get_items + assert parent is sphinx.ext.autosummary.Autosummary + assert modname == 'sphinx.ext.autosummary' + + +@pytest.mark.sphinx('dummy', testroot='ext-autosummary-mock_imports') +def test_autosummary_mock_imports(app, status, warning): + try: + app.build() + assert warning.getvalue() == '' + + # generated/foo is generated successfully + assert app.env.get_doctree('generated/foo') + finally: + sys.modules.pop('foo', None) # unload foo module + + +@pytest.mark.sphinx('dummy', testroot='ext-autosummary-imported_members') +def test_autosummary_imported_members(app, status, warning): + try: + app.build() + # generated/autosummary_dummy_package is generated successfully + assert app.env.get_doctree('generated/autosummary_dummy_package') + + module = (app.srcdir / 'generated' / 'autosummary_dummy_package.rst').read_text(encoding='utf8') + assert (' .. autosummary::\n' + ' \n' + ' Bar\n' + ' \n' in module) + assert (' .. autosummary::\n' + ' \n' + ' foo\n' + ' \n' in module) + finally: + sys.modules.pop('autosummary_dummy_package', None) + + +@pytest.mark.sphinx('dummy', testroot='ext-autosummary-module_all') +def test_autosummary_module_all(app, status, warning): + try: + app.build() + # generated/autosummary_dummy_package_all is generated successfully + assert app.env.get_doctree('generated/autosummary_dummy_package_all') + module = (app.srcdir / 'generated' / 'autosummary_dummy_package_all.rst').read_text(encoding='utf8') + assert (' .. autosummary::\n' + ' \n' + ' PublicBar\n' + ' \n' in module) + assert (' .. autosummary::\n' + ' \n' + ' public_foo\n' + ' public_baz\n' + ' \n' in module) + assert ('.. 
autosummary::\n' + ' :toctree:\n' + ' :recursive:\n\n' + ' autosummary_dummy_package_all.extra_dummy_module\n\n' in module) + finally: + sys.modules.pop('autosummary_dummy_package_all', None) + + +@pytest.mark.sphinx(testroot='ext-autodoc', + confoverrides={'extensions': ['sphinx.ext.autosummary']}) +def test_generate_autosummary_docs_property(app): + with patch('sphinx.ext.autosummary.generate.find_autosummary_in_files') as mock: + mock.return_value = [AutosummaryEntry('target.methods.Base.prop', 'prop', None, False)] + generate_autosummary_docs([], output_dir=app.srcdir, app=app) + + content = (app.srcdir / 'target.methods.Base.prop.rst').read_text(encoding='utf8') + assert content == ("target.methods.Base.prop\n" + "========================\n" + "\n" + ".. currentmodule:: target.methods\n" + "\n" + ".. autoproperty:: Base.prop") + + +@pytest.mark.sphinx(testroot='ext-autosummary-skip-member') +def test_autosummary_skip_member(app): + app.build() + + content = (app.srcdir / 'generate' / 'target.Foo.rst').read_text(encoding='utf8') + assert 'Foo.skipmeth' not in content + assert 'Foo._privatemeth' in content + + +@pytest.mark.sphinx(testroot='ext-autosummary-template') +def test_autosummary_template(app): + app.build() + + content = (app.srcdir / 'generate' / 'target.Foo.rst').read_text(encoding='utf8') + assert 'EMPTY' in content + + +@pytest.mark.sphinx('dummy', testroot='ext-autosummary', + confoverrides={'autosummary_generate': []}) +def test_empty_autosummary_generate(app, status, warning): + app.build() + assert ("WARNING: autosummary: failed to import autosummary_importfail" + in warning.getvalue()) + + +@pytest.mark.sphinx('dummy', testroot='ext-autosummary', + confoverrides={'autosummary_generate': ['unknown']}) +def test_invalid_autosummary_generate(app, status, warning): + assert 'WARNING: autosummary_generate: file not found: unknown.rst' in warning.getvalue() + + +def test_autogen(rootdir, tmp_path): + with chdir(rootdir / 'test-templating'): + args = ['-o', str(tmp_path), '-t', '.', 'autosummary_templating.txt'] + autogen_main(args) + assert (tmp_path / 'sphinx.application.TemplateBridge.rst').exists() diff --git a/tests/test_ext_coverage.py b/tests/test_ext_coverage.py new file mode 100644 index 0000000..af8cf53 --- /dev/null +++ b/tests/test_ext_coverage.py @@ -0,0 +1,101 @@ +"""Test the coverage builder.""" + +import pickle + +import pytest + + +@pytest.mark.sphinx('coverage') +def test_build(app, status, warning): + app.builder.build_all() + + py_undoc = (app.outdir / 'python.txt').read_text(encoding='utf8') + assert py_undoc.startswith('Undocumented Python objects\n' + '===========================\n') + assert 'autodoc_target\n--------------\n' in py_undoc + assert ' * Class -- missing methods:\n' in py_undoc + assert ' * raises\n' in py_undoc + assert ' * function\n' not in py_undoc # these two are documented + assert ' * Class\n' not in py_undoc # in autodoc.txt + + assert " * mod -- No module named 'mod'" in py_undoc # in the "failed import" section + + assert "undocumented py" not in status.getvalue() + + c_undoc = (app.outdir / 'c.txt').read_text(encoding='utf8') + assert c_undoc.startswith('Undocumented C API elements\n' + '===========================\n') + assert 'api.h' in c_undoc + assert ' * Py_SphinxTest' in c_undoc + + undoc_py, undoc_c, py_undocumented, py_documented = pickle.loads((app.outdir / 'undoc.pickle').read_bytes()) + assert len(undoc_c) == 1 + # the key is the full path to the header file, which isn't testable + assert list(undoc_c.values())[0] == 
{('function', 'Py_SphinxTest')} + + assert 'autodoc_target' in undoc_py + assert 'funcs' in undoc_py['autodoc_target'] + assert 'raises' in undoc_py['autodoc_target']['funcs'] + assert 'classes' in undoc_py['autodoc_target'] + assert 'Class' in undoc_py['autodoc_target']['classes'] + assert 'undocmeth' in undoc_py['autodoc_target']['classes']['Class'] + + assert "undocumented c" not in status.getvalue() + + +@pytest.mark.sphinx('coverage', testroot='ext-coverage') +def test_coverage_ignore_pyobjects(app, status, warning): + app.builder.build_all() + actual = (app.outdir / 'python.txt').read_text(encoding='utf8') + expected = '''\ +Undocumented Python objects +=========================== + +Statistics +---------- + ++----------------------+----------+--------------+ +| Module | Coverage | Undocumented | ++======================+==========+==============+ +| coverage_not_ignored | 0.00% | 2 | ++----------------------+----------+--------------+ +| TOTAL | 0.00% | 2 | ++----------------------+----------+--------------+ + +coverage_not_ignored +-------------------- + +Classes: + * Documented -- missing methods: + + - not_ignored1 + - not_ignored2 + * NotIgnored + +''' + assert actual == expected + + +@pytest.mark.sphinx('coverage', confoverrides={'coverage_show_missing_items': True}) +def test_show_missing_items(app, status, warning): + app.builder.build_all() + + assert "undocumented" in status.getvalue() + + assert "py function raises" in status.getvalue() + assert "py class Base" in status.getvalue() + assert "py method Class.roger" in status.getvalue() + + assert "c api Py_SphinxTest [ function]" in status.getvalue() + + +@pytest.mark.sphinx('coverage', confoverrides={'coverage_show_missing_items': True}) +def test_show_missing_items_quiet(app, status, warning): + app.quiet = True + app.builder.build_all() + + assert "undocumented python function: autodoc_target :: raises" in warning.getvalue() + assert "undocumented python class: autodoc_target :: Base" in warning.getvalue() + assert "undocumented python method: autodoc_target :: Class :: roger" in warning.getvalue() + + assert "undocumented c api: Py_SphinxTest [function]" in warning.getvalue() diff --git a/tests/test_ext_doctest.py b/tests/test_ext_doctest.py new file mode 100644 index 0000000..c83e582 --- /dev/null +++ b/tests/test_ext_doctest.py @@ -0,0 +1,136 @@ +"""Test the doctest extension.""" +import os +from collections import Counter + +import pytest +from docutils import nodes +from packaging.specifiers import InvalidSpecifier +from packaging.version import InvalidVersion + +from sphinx.ext.doctest import is_allowed_version + +cleanup_called = 0 + + +@pytest.mark.sphinx('doctest', testroot='ext-doctest') +def test_build(app, status, warning): + global cleanup_called + cleanup_called = 0 + app.builder.build_all() + if app.statuscode != 0: + raise AssertionError('failures in doctests:' + status.getvalue()) + # in doctest.txt, there are two named groups and the default group, + # so the cleanup function must be called three times + assert cleanup_called == 3, 'testcleanup did not get executed enough times' + + +@pytest.mark.sphinx('dummy', testroot='ext-doctest') +def test_highlight_language_default(app, status, warning): + app.build() + doctree = app.env.get_doctree('doctest') + for node in doctree.findall(nodes.literal_block): + assert node['language'] in {'python', 'pycon', 'none'} + + +@pytest.mark.sphinx('dummy', testroot='ext-doctest', + confoverrides={'highlight_language': 'python'}) +def 
test_highlight_language_python3(app, status, warning): + app.build() + doctree = app.env.get_doctree('doctest') + for node in doctree.findall(nodes.literal_block): + assert node['language'] in {'python', 'pycon', 'none'} + + +def test_is_allowed_version(): + assert is_allowed_version('<3.4', '3.3') is True + assert is_allowed_version('<3.4', '3.3') is True + assert is_allowed_version('<3.2', '3.3') is False + assert is_allowed_version('<=3.4', '3.3') is True + assert is_allowed_version('<=3.2', '3.3') is False + assert is_allowed_version('==3.3', '3.3') is True + assert is_allowed_version('==3.4', '3.3') is False + assert is_allowed_version('>=3.2', '3.3') is True + assert is_allowed_version('>=3.4', '3.3') is False + assert is_allowed_version('>3.2', '3.3') is True + assert is_allowed_version('>3.4', '3.3') is False + assert is_allowed_version('~=3.4', '3.4.5') is True + assert is_allowed_version('~=3.4', '3.5.0') is True + + # invalid spec + with pytest.raises(InvalidSpecifier): + is_allowed_version('&3.4', '3.5') + + # invalid version + with pytest.raises(InvalidVersion): + is_allowed_version('>3.4', 'Sphinx') + + +def cleanup_call(): + global cleanup_called + cleanup_called += 1 + + +recorded_calls = Counter() + + +@pytest.mark.sphinx('doctest', testroot='ext-doctest-skipif') +def test_skipif(app, status, warning): + """Tests for the :skipif: option + + The tests are separated into a different test root directory since the + ``app`` object only evaluates options once in its lifetime. If these tests + were combined with the other doctest tests, the ``:skipif:`` evaluations + would be recorded only on the first ``app.builder.build_all()`` run, i.e. + in ``test_build`` above, and the assertion below would fail. + + """ + global recorded_calls + recorded_calls = Counter() + app.builder.build_all() + if app.statuscode != 0: + raise AssertionError('failures in doctests:' + status.getvalue()) + # The `:skipif:` expressions are always run. + # Actual tests and setup/cleanup code is only run if the `:skipif:` + # expression evaluates to a False value. + # Global setup/cleanup are run before/after evaluating the `:skipif:` + # option in each directive - thus 11 additional invocations for each on top + # of the ones made for the whole test file. 
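# --- Illustrative aside (not part of the patch above) ---------------------------
# Hedged sketch of the behaviour the is_allowed_version() assertions earlier in this
# file rely on: the version gate can be reproduced with the ``packaging`` project,
# whose InvalidSpecifier/InvalidVersion errors this test module already imports.
# The helper name ``version_allowed`` is an assumption, not the implementation in the diff.

from packaging.specifiers import SpecifierSet
from packaging.version import Version

def version_allowed(spec: str, version: str) -> bool:
    # Raises InvalidSpecifier or InvalidVersion for malformed input,
    # matching the pytest.raises() expectations in test_is_allowed_version().
    return Version(version) in SpecifierSet(spec)

assert version_allowed('~=3.4', '3.4.5')
assert not version_allowed('<3.2', '3.3')
# ---------------------------------------------------------------------------------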
+ assert recorded_calls == {('doctest_global_setup', 'body', True): 13, + ('testsetup', ':skipif:', True): 1, + ('testsetup', ':skipif:', False): 1, + ('testsetup', 'body', False): 1, + ('doctest', ':skipif:', True): 1, + ('doctest', ':skipif:', False): 1, + ('doctest', 'body', False): 1, + ('testcode', ':skipif:', True): 1, + ('testcode', ':skipif:', False): 1, + ('testcode', 'body', False): 1, + ('testoutput-1', ':skipif:', True): 1, + ('testoutput-2', ':skipif:', True): 1, + ('testoutput-2', ':skipif:', False): 1, + ('testcleanup', ':skipif:', True): 1, + ('testcleanup', ':skipif:', False): 1, + ('testcleanup', 'body', False): 1, + ('doctest_global_cleanup', 'body', True): 13} + + +def record(directive, part, should_skip): + recorded_calls[(directive, part, should_skip)] += 1 + return f'Recorded {directive} {part} {should_skip}' + + +@pytest.mark.sphinx('doctest', testroot='ext-doctest-with-autodoc') +def test_reporting_with_autodoc(app, status, warning, capfd): + # Patch builder to get a copy of the output + written = [] + app.builder._warn_out = written.append + app.builder.build_all() + + failures = [line.replace(os.sep, '/') + for line in '\n'.join(written).splitlines() + if line.startswith('File')] + + assert 'File "dir/inner.rst", line 1, in default' in failures + assert 'File "dir/bar.py", line ?, in default' in failures + assert 'File "foo.py", line ?, in default' in failures + assert 'File "index.rst", line 4, in default' in failures diff --git a/tests/test_ext_duration.py b/tests/test_ext_duration.py new file mode 100644 index 0000000..4fa4dfc --- /dev/null +++ b/tests/test_ext_duration.py @@ -0,0 +1,14 @@ +"""Test sphinx.ext.duration extension.""" + +import re + +import pytest + + +@pytest.mark.sphinx('dummy', testroot='basic', + confoverrides={'extensions': ['sphinx.ext.duration']}) +def test_githubpages(app, status, warning): + app.build() + + assert 'slowest reading durations' in status.getvalue() + assert re.search('\\d+\\.\\d{3} index\n', status.getvalue()) diff --git a/tests/test_ext_extlinks.py b/tests/test_ext_extlinks.py new file mode 100644 index 0000000..7634db6 --- /dev/null +++ b/tests/test_ext_extlinks.py @@ -0,0 +1,45 @@ +import pytest + + +@pytest.mark.sphinx('html', testroot='ext-extlinks-hardcoded-urls', + confoverrides={'extlinks_detect_hardcoded_links': False}) +def test_extlinks_detect_candidates(app, warning): + app.build() + assert warning.getvalue() == '' + + +@pytest.mark.sphinx('html', testroot='ext-extlinks-hardcoded-urls') +def test_replaceable_uris_emit_extlinks_warnings(app, warning): + app.build() + warning_output = warning.getvalue() + + # there should be exactly three warnings for replaceable URLs + message = ( + "index.rst:%d: WARNING: hardcoded link 'https://github.com/sphinx-doc/sphinx/issues/1' " + "could be replaced by an extlink (try using '%s' instead)" + ) + assert message % (11, ":issue:`1`") in warning_output + assert message % (13, ":issue:`inline replaceable link <1>`") in warning_output + assert message % (15, ":issue:`replaceable link <1>`") in warning_output + + +@pytest.mark.sphinx('html', testroot='ext-extlinks-hardcoded-urls-multiple-replacements') +def test_all_replacements_suggested_if_multiple_replacements_possible(app, warning): + app.build() + warning_output = warning.getvalue() + # there should be six warnings for replaceable URLs, three pairs per link + assert warning_output.count("WARNING: hardcoded link") == 6 + message = ( + "index.rst:%d: WARNING: hardcoded link 'https://github.com/octocat' " + "could be replaced 
by an extlink (try using '%s' instead)" + ) + assert message % (14, ":user:`octocat`") in warning_output + assert message % (16, ":user:`inline replaceable link <octocat>`") in warning_output + assert message % (18, ":user:`replaceable link <octocat>`") in warning_output + message = ( + "index.rst:%d: WARNING: hardcoded link 'https://github.com/octocat' " + "could be replaced by an extlink (try using '%s' instead)" + ) + assert message % (14, ":repo:`octocat`") in warning_output + assert message % (16, ":repo:`inline replaceable link <octocat>`") in warning_output + assert message % (18, ":repo:`replaceable link <octocat>`") in warning_output diff --git a/tests/test_ext_githubpages.py b/tests/test_ext_githubpages.py new file mode 100644 index 0000000..8e41537 --- /dev/null +++ b/tests/test_ext_githubpages.py @@ -0,0 +1,26 @@ +"""Test sphinx.ext.githubpages extension.""" + +import pytest + + +@pytest.mark.sphinx('html', testroot='ext-githubpages') +def test_githubpages(app, status, warning): + app.builder.build_all() + assert (app.outdir / '.nojekyll').exists() + assert not (app.outdir / 'CNAME').exists() + + +@pytest.mark.sphinx('html', testroot='ext-githubpages', + confoverrides={'html_baseurl': 'https://sphinx-doc.github.io'}) +def test_no_cname_for_github_io_domain(app, status, warning): + app.builder.build_all() + assert (app.outdir / '.nojekyll').exists() + assert not (app.outdir / 'CNAME').exists() + + +@pytest.mark.sphinx('html', testroot='ext-githubpages', + confoverrides={'html_baseurl': 'https://sphinx-doc.org'}) +def test_cname_for_custom_domain(app, status, warning): + app.builder.build_all() + assert (app.outdir / '.nojekyll').exists() + assert (app.outdir / 'CNAME').read_text(encoding='utf8') == 'sphinx-doc.org' diff --git a/tests/test_ext_graphviz.py b/tests/test_ext_graphviz.py new file mode 100644 index 0000000..d63dc2a --- /dev/null +++ b/tests/test_ext_graphviz.py @@ -0,0 +1,196 @@ +"""Test sphinx.ext.graphviz extension.""" + +import re +import sys + +import pytest + +from sphinx.ext.graphviz import ClickableMapDefinition + + +@pytest.mark.sphinx('html', testroot='ext-graphviz') +@pytest.mark.usefixtures('if_graphviz_found') +def test_graphviz_png_html(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + html = (r'<figure class="align-default" .*?>\s*' + r'<div class="graphviz"><img .*?/></div>\s*<figcaption>\s*' + r'<p><span class="caption-text">caption of graph</span>.*</p>\s*' + r'</figcaption>\s*</figure>') + assert re.search(html, content, re.S) + + html = 'Hello <div class="graphviz"><img .*?/></div>\n graphviz world' + assert re.search(html, content, re.S) + + html = ('<img src=".*?" 
alt="digraph foo {\nbaz -> qux\n}" ' + 'class="graphviz neato-graph" />') + assert re.search(html, content, re.S) + + html = (r'<figure class="align-right" .*?>\s*' + r'<div class="graphviz"><img .*?/></div>\s*<figcaption>\s*' + r'<p><span class="caption-text">on <em>right</em></span>.*</p>\s*' + r'</figcaption>\s*</figure>') + assert re.search(html, content, re.S) + + html = (r'<div align=\"center\" class=\"align-center\">' + r'<div class="graphviz"><img src=\".*\.png\" alt=\"digraph foo {\n' + r'centered\n' + r'}\" class="graphviz" /></div>\n</div>') + assert re.search(html, content, re.S) + + +@pytest.mark.sphinx('html', testroot='ext-graphviz', + confoverrides={'graphviz_output_format': 'svg'}) +@pytest.mark.usefixtures('if_graphviz_found') +def test_graphviz_svg_html(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + + html = (r'<figure class=\"align-default\" .*?>\n' + r'<div class="graphviz"><object data=\".*\.svg\".*>\n' + r'\s*<p class=\"warning\">digraph foo {\n' + r'bar -> baz\n' + r'}</p></object></div>\n' + r'<figcaption>\n' + r'<p><span class=\"caption-text\">caption of graph</span>.*</p>\n' + r'</figcaption>\n' + r'</figure>') + assert re.search(html, content, re.S) + + html = (r'Hello <div class="graphviz"><object.*>\n' + r'\s*<p class=\"warning\">graph</p></object></div>\n' + r' graphviz world') + assert re.search(html, content, re.S) + + html = (r'<figure class=\"align-right\" .*\>\n' + r'<div class="graphviz"><object data=\".*\.svg\".*>\n' + r'\s*<p class=\"warning\">digraph bar {\n' + r'foo -> bar\n' + r'}</p></object></div>\n' + r'<figcaption>\n' + r'<p><span class=\"caption-text\">on <em>right</em></span>.*</p>\n' + r'</figcaption>\n' + r'</figure>') + assert re.search(html, content, re.S) + + html = (r'<div align=\"center\" class=\"align-center\">' + r'<div class="graphviz"><object data=\".*\.svg\".*>\n' + r'\s*<p class=\"warning\">digraph foo {\n' + r'centered\n' + r'}</p></object></div>\n' + r'</div>') + assert re.search(html, content, re.S) + + image_re = r'.*data="([^"]+)".*?digraph test' + image_path_match = re.search(image_re, content, re.S) + assert image_path_match + + image_path = image_path_match.group(1) + image_content = (app.outdir / image_path).read_text(encoding='utf8') + if sys.platform == 'win32': + assert '".\\_static\\' not in image_content + assert r'<ns0:image ns1:href="..\_static\images\test.svg"' in image_content + assert r'<ns0:a ns1:href="..\_static\images\test.svg"' in image_content + else: + assert '"./_static/' not in image_content + assert '<ns0:image ns1:href="../_static/images/test.svg"' in image_content + assert '<ns0:a ns1:href="../_static/images/test.svg"' in image_content + assert '<ns0:a ns1:href="..#graphviz"' in image_content + + +@pytest.mark.sphinx('latex', testroot='ext-graphviz') +@pytest.mark.usefixtures('if_graphviz_found') +def test_graphviz_latex(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'python.tex').read_text(encoding='utf8') + macro = ('\\\\begin{figure}\\[htbp\\]\n\\\\centering\n\\\\capstart\n\n' + '\\\\sphinxincludegraphics\\[\\]{graphviz-\\w+.pdf}\n' + '\\\\caption{caption of graph}\\\\label{.*}\\\\end{figure}') + assert re.search(macro, content, re.S) + + macro = 'Hello \\\\sphinxincludegraphics\\[\\]{graphviz-\\w+.pdf} graphviz world' + assert re.search(macro, content, re.S) + + macro = ('\\\\begin{wrapfigure}{r}{0pt}\n\\\\centering\n' + '\\\\sphinxincludegraphics\\[\\]{graphviz-\\w+.pdf}\n' + '\\\\caption{on 
\\\\sphinxstyleemphasis{right}}' + '\\\\label{.*}\\\\end{wrapfigure}') + assert re.search(macro, content, re.S) + + macro = (r'\{\\hfill' + r'\\sphinxincludegraphics\[\]{graphviz-.*}' + r'\\hspace\*{\\fill}}') + assert re.search(macro, content, re.S) + + +@pytest.mark.sphinx('html', testroot='ext-graphviz', confoverrides={'language': 'xx'}) +@pytest.mark.usefixtures('if_graphviz_found') +def test_graphviz_i18n(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + html = '<img src=".*?" alt="digraph {\n BAR -> BAZ\n}" class="graphviz" />' + assert re.search(html, content, re.M) + + +def test_graphviz_parse_mapfile(): + # empty graph + code = ('# digraph {\n' + '# }\n') + content = ('<map id="%3" name="%3">\n' + '</map>') + cmap = ClickableMapDefinition('dummy.map', content, code) + assert cmap.filename == 'dummy.map' + assert cmap.id == 'grapvizb08107169e' + assert len(cmap.clickable) == 0 + assert cmap.generate_clickable_map() == '' + + # normal graph + code = ('digraph {\n' + ' foo [href="http://www.google.com/"];\n' + ' foo -> bar;\n' + '}\n') + content = ('<map id="%3" name="%3">\n' + '<area shape="poly" id="node1" href="http://www.google.com/" title="foo" alt=""' + ' coords="77,29,76,22,70,15,62,10,52,7,41,5,30,7,20,10,12,15,7,22,5,29,7,37,12,' + '43,20,49,30,52,41,53,52,52,62,49,70,43,76,37"/>\n' + '</map>') + cmap = ClickableMapDefinition('dummy.map', content, code) + assert cmap.filename == 'dummy.map' + assert cmap.id == 'grapviza4ccdd48ce' + assert len(cmap.clickable) == 1 + assert cmap.generate_clickable_map() == content.replace('%3', cmap.id) + + # inheritance-diagram:: sphinx.builders.html + content = ( + '<map id="inheritance66ff5471b9" name="inheritance66ff5471b9">\n' + '<area shape="rect" id="node1" title="Builds target formats from the reST sources."' + ' alt="" coords="26,95,125,110"/>\n' + '<area shape="rect" id="node5" title="Builds standalone HTML docs."' + ' alt="" coords="179,95,362,110"/>\n' + '<area shape="rect" id="node2" title="buildinfo file manipulator." ' + ' alt="" coords="14,64,138,80"/>\n' + '<area shape="rect" id="node3" title="The container of stylesheets."' + ' alt="" coords="3,34,148,49"/>\n' + '<area shape="rect" id="node4" title="A StandaloneHTMLBuilder that creates all HTML' + ' pages as "index.html" in" alt="" coords="395,64,569,80"/>\n' + '<area shape="rect" id="node7" title="An abstract builder that serializes' + ' the generated HTML." alt="" coords="392,95,571,110"/>\n' + '<area shape="rect" id="node9" title="A StandaloneHTMLBuilder subclass that puts' + ' the whole document tree on one" alt="" coords="393,125,570,141"/>\n' + '<area shape="rect" id="node6" title="A builder that dumps the generated HTML' + ' into JSON files." alt="" coords="602,80,765,95"/>\n' + '<area shape="rect" id="node8" title="A Builder that dumps the generated HTML' + ' into pickle files." 
alt="" coords="602,110,765,125"/>\n' + '<area shape="rect" id="node10" title="The metadata of stylesheet."' + ' alt="" coords="11,3,141,19"/>\n' + '</map>' + ) + cmap = ClickableMapDefinition('dummy.map', content, 'dummy_code') + assert cmap.filename == 'dummy.map' + assert cmap.id == 'inheritance66ff5471b9' + assert len(cmap.clickable) == 0 + assert cmap.generate_clickable_map() == '' diff --git a/tests/test_ext_ifconfig.py b/tests/test_ext_ifconfig.py new file mode 100644 index 0000000..0292699 --- /dev/null +++ b/tests/test_ext_ifconfig.py @@ -0,0 +1,28 @@ +"""Test sphinx.ext.ifconfig extension.""" + +import docutils.utils +import pytest + +from sphinx import addnodes +from sphinx.testing import restructuredtext + + +@pytest.mark.sphinx('text', testroot='ext-ifconfig') +def test_ifconfig(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'index.txt').read_text(encoding='utf8') + assert 'spam' in result + assert 'ham' not in result + + +def test_ifconfig_content_line_number(app): + app.setup_extension("sphinx.ext.ifconfig") + text = (".. ifconfig:: confval1\n" + + "\n" + + " Some link here: :ref:`abc`\n") + doc = restructuredtext.parse(app, text) + xrefs = list(doc.findall(condition=addnodes.pending_xref)) + assert len(xrefs) == 1 + source, line = docutils.utils.get_source_line(xrefs[0]) + assert 'index.rst' in source + assert line == 3 diff --git a/tests/test_ext_imgconverter.py b/tests/test_ext_imgconverter.py new file mode 100644 index 0000000..18be700 --- /dev/null +++ b/tests/test_ext_imgconverter.py @@ -0,0 +1,34 @@ +"""Test sphinx.ext.imgconverter extension.""" + +import subprocess + +import pytest + + +@pytest.fixture() +def _if_converter_found(app): + image_converter = getattr(app.config, 'image_converter', '') + try: + if image_converter: + subprocess.run([image_converter, '-version'], capture_output=True) # show version + return + except OSError: # No such file or directory + pass + + pytest.skip('image_converter "%s" is not available' % image_converter) + + +@pytest.mark.usefixtures('_if_converter_found') +@pytest.mark.sphinx('latex', testroot='ext-imgconverter') +def test_ext_imgconverter(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'python.tex').read_text(encoding='utf8') + + # supported image (not converted) + assert '\\sphinxincludegraphics{{img}.pdf}' in content + + # non supported image (converted) + assert '\\sphinxincludegraphics{{svgimg}.png}' in content + assert not (app.outdir / 'svgimg.svg').exists() + assert (app.outdir / 'svgimg.png').exists() diff --git a/tests/test_ext_imgmockconverter.py b/tests/test_ext_imgmockconverter.py new file mode 100644 index 0000000..b5d4e79 --- /dev/null +++ b/tests/test_ext_imgmockconverter.py @@ -0,0 +1,17 @@ +"""Test image converter with identical basenames""" + +import pytest + + +@pytest.mark.sphinx('latex', testroot='ext-imgmockconverter') +def test_ext_imgmockconverter(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'python.tex').read_text(encoding='utf8') + + # check identical basenames give distinct files + assert '\\sphinxincludegraphics{{svgimg}.pdf}' in content + assert '\\sphinxincludegraphics{{svgimg1}.pdf}' in content + assert not (app.outdir / 'svgimg.svg').exists() + assert (app.outdir / 'svgimg.pdf').exists() + assert (app.outdir / 'svgimg1.pdf').exists() diff --git a/tests/test_ext_inheritance_diagram.py b/tests/test_ext_inheritance_diagram.py new file mode 100644 index 0000000..9ace5ad --- /dev/null +++ 
b/tests/test_ext_inheritance_diagram.py @@ -0,0 +1,342 @@ +"""Test sphinx.ext.inheritance_diagram extension.""" + +import os +import re +import sys +import zlib + +import pytest + +from sphinx.ext.inheritance_diagram import ( + InheritanceDiagram, + InheritanceException, + import_classes, +) +from sphinx.ext.intersphinx import load_mappings, normalize_intersphinx_mapping + + +@pytest.mark.sphinx(buildername="html", testroot="inheritance") +@pytest.mark.usefixtures('if_graphviz_found') +def test_inheritance_diagram(app, status, warning): + # monkey-patch InheritanceDiagram.run() so we can get access to its + # results. + orig_run = InheritanceDiagram.run + graphs = {} + + def new_run(self): + result = orig_run(self) + node = result[0] + source = os.path.basename(node.document.current_source).replace(".rst", "") + graphs[source] = node['graph'] + return result + + InheritanceDiagram.run = new_run + + try: + app.builder.build_all() + finally: + InheritanceDiagram.run = orig_run + + assert app.statuscode == 0 + + html_warnings = warning.getvalue() + assert html_warnings == "" + + # note: it is better to split these asserts into separate test functions + # but I can't figure out how to build only a specific .rst file + + # basic inheritance diagram showing all classes + for cls in graphs['basic_diagram'].class_info: + # use `in` because the traversal order can differ between runs + assert cls in [ + ('dummy.test.A', 'dummy.test.A', [], None), + ('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None), + ('dummy.test.C', 'dummy.test.C', ['dummy.test.A'], None), + ('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None), + ('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None), + ('dummy.test.B', 'dummy.test.B', ['dummy.test.A'], None), + ] + + # inheritance diagram using :parts: 1 option + for cls in graphs['diagram_w_parts'].class_info: + assert cls in [ + ('A', 'dummy.test.A', [], None), + ('F', 'dummy.test.F', ['C'], None), + ('C', 'dummy.test.C', ['A'], None), + ('E', 'dummy.test.E', ['B'], None), + ('D', 'dummy.test.D', ['B', 'C'], None), + ('B', 'dummy.test.B', ['A'], None), + ] + + # inheritance diagram with 1 top class + # :top-classes: dummy.test.B + # rendering should be + # A + # \ + # B C + # / \ / \ + # E D F + # + for cls in graphs['diagram_w_1_top_class'].class_info: + assert cls in [ + ('dummy.test.A', 'dummy.test.A', [], None), + ('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None), + ('dummy.test.C', 'dummy.test.C', ['dummy.test.A'], None), + ('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None), + ('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None), + ('dummy.test.B', 'dummy.test.B', [], None), + ] + + # inheritance diagram with 2 top classes + # :top-classes: dummy.test.B, dummy.test.C + # Note: we're specifying separate classes, not the entire module here + # rendering should be + # + # B C + # / \ / \ + # E D F + # + for cls in graphs['diagram_w_2_top_classes'].class_info: + assert cls in [ + ('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None), + ('dummy.test.C', 'dummy.test.C', [], None), + ('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None), + ('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None), + ('dummy.test.B', 'dummy.test.B', [], None), + ] + + # inheritance diagram with 2 top classes and specifying the entire module + # rendering should be + # + # A + # B C + # / \ / \ + # E D F + # + # Note: dummy.test.A is included in the graph before its descendants are even processed + # because we've specified to load 
the entire module. The way InheritanceGraph works it is very + # hard to exclude parent classes once after they have been included in the graph. + # If you'd like to not show class A in the graph don't specify the entire module. + # this is a known issue. + for cls in graphs['diagram_module_w_2_top_classes'].class_info: + assert cls in [ + ('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None), + ('dummy.test.C', 'dummy.test.C', [], None), + ('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None), + ('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None), + ('dummy.test.B', 'dummy.test.B', [], None), + ('dummy.test.A', 'dummy.test.A', [], None), + ] + + # inheritance diagram involving a base class nested within another class + for cls in graphs['diagram_w_nested_classes'].class_info: + assert cls in [ + ('dummy.test_nested.A', 'dummy.test_nested.A', [], None), + ('dummy.test_nested.C', 'dummy.test_nested.C', ['dummy.test_nested.A.B'], None), + ('dummy.test_nested.A.B', 'dummy.test_nested.A.B', [], None), + ] + + +# An external inventory to test intersphinx links in inheritance diagrams +external_inventory = b'''\ +# Sphinx inventory version 2 +# Project: external +# Version: 1.0 +# The remainder of this file is compressed using zlib. +''' + zlib.compress(b'''\ +external.other.Bob py:class 1 foo.html#external.other.Bob - +''') + + +@pytest.mark.sphinx('html', testroot='ext-inheritance_diagram') +@pytest.mark.usefixtures('if_graphviz_found') +def test_inheritance_diagram_png_html(tmp_path, app): + inv_file = tmp_path / 'inventory' + inv_file.write_bytes(external_inventory) + app.config.intersphinx_mapping = { + 'https://example.org': str(inv_file), + } + app.config.intersphinx_cache_limit = 0 + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + base_maps = re.findall('<map .+\n.+\n</map>', content) + + pattern = ('<figure class="align-default" id="id1">\n' + '<div class="graphviz">' + '<img src="_images/inheritance-\\w+.png" alt="Inheritance diagram of test.Foo" ' + 'class="inheritance graphviz" /></div>\n<figcaption>\n<p>' + '<span class="caption-text">Test Foo!</span><a class="headerlink" href="#id1" ' + 'title="Link to this image">\xb6</a></p>\n</figcaption>\n</figure>\n') + assert re.search(pattern, content, re.M) + + subdir_content = (app.outdir / 'subdir/page1.html').read_text(encoding='utf8') + subdir_maps = re.findall('<map .+\n.+\n</map>', subdir_content) + subdir_maps = [re.sub('href="(\\S+)"', 'href="subdir/\\g<1>"', s) for s in subdir_maps] + + # Go through the clickmap for every PNG inheritance diagram + for diagram_content in base_maps + subdir_maps: + # Verify that an intersphinx link was created via the external inventory + if 'subdir.' 
in diagram_content: + assert "https://example.org" in diagram_content + + # Extract every link in the inheritance diagram + for href in re.findall('href="(\\S+?)"', diagram_content): + if '://' in href: + # Verify that absolute URLs are not prefixed with ../ + assert href.startswith("https://example.org/") + else: + # Verify that relative URLs point to existing documents + reluri = href.rsplit('#', 1)[0] # strip the anchor at the end + assert (app.outdir / reluri).exists() + + +@pytest.mark.sphinx('html', testroot='ext-inheritance_diagram', + confoverrides={'graphviz_output_format': 'svg'}) +@pytest.mark.usefixtures('if_graphviz_found') +def test_inheritance_diagram_svg_html(tmp_path, app): + inv_file = tmp_path / 'inventory' + inv_file.write_bytes(external_inventory) + app.config.intersphinx_mapping = { + "subdir": ('https://example.org', str(inv_file)), + } + app.config.intersphinx_cache_limit = 0 + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + base_svgs = re.findall('<object data="(_images/inheritance-\\w+.svg?)"', content) + + pattern = ('<figure class="align-default" id="id1">\n' + '<div class="graphviz">' + '<object data="_images/inheritance-\\w+.svg" ' + 'type="image/svg\\+xml" class="inheritance graphviz">\n' + '<p class=\"warning\">Inheritance diagram of test.Foo</p>' + '</object></div>\n<figcaption>\n<p><span class="caption-text">' + 'Test Foo!</span><a class="headerlink" href="#id1" ' + 'title="Link to this image">\xb6</a></p>\n</figcaption>\n</figure>\n') + + assert re.search(pattern, content, re.M) + + subdir_content = (app.outdir / 'subdir/page1.html').read_text(encoding='utf8') + subdir_svgs = re.findall('<object data="../(_images/inheritance-\\w+.svg?)"', subdir_content) + + # Go through every SVG inheritance diagram + for diagram in base_svgs + subdir_svgs: + diagram_content = (app.outdir / diagram).read_text(encoding='utf8') + + # Verify that an intersphinx link was created via the external inventory + if 'subdir.' 
in diagram_content: + assert "https://example.org" in diagram_content + + # Extract every link in the inheritance diagram + for href in re.findall('href="(\\S+?)"', diagram_content): + if '://' in href: + # Verify that absolute URLs are not prefixed with ../ + assert href.startswith("https://example.org/") + else: + # Verify that relative URLs point to existing documents + reluri = href.rsplit('#', 1)[0] # strip the anchor at the end + abs_uri = (app.outdir / app.builder.imagedir / reluri).resolve() + assert abs_uri.exists() + + +@pytest.mark.sphinx('latex', testroot='ext-inheritance_diagram') +@pytest.mark.usefixtures('if_graphviz_found') +def test_inheritance_diagram_latex(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'python.tex').read_text(encoding='utf8') + + pattern = ('\\\\begin{figure}\\[htbp]\n\\\\centering\n\\\\capstart\n\n' + '\\\\sphinxincludegraphics\\[\\]{inheritance-\\w+.pdf}\n' + '\\\\caption{Test Foo!}\\\\label{\\\\detokenize{index:id1}}\\\\end{figure}') + assert re.search(pattern, content, re.M) + + +@pytest.mark.sphinx('html', testroot='ext-inheritance_diagram', + srcdir='ext-inheritance_diagram-alias') +@pytest.mark.usefixtures('if_graphviz_found') +def test_inheritance_diagram_latex_alias(app, status, warning): + app.config.inheritance_alias = {'test.Foo': 'alias.Foo'} + app.builder.build_all() + + doc = app.env.get_and_resolve_doctree('index', app) + aliased_graph = doc.children[0].children[3]['graph'].class_info + assert len(aliased_graph) == 4 + assert ('test.DocSubDir2', 'test.DocSubDir2', ['test.DocSubDir1'], None) in aliased_graph + assert ('test.DocSubDir1', 'test.DocSubDir1', ['test.DocHere'], None) in aliased_graph + assert ('test.DocHere', 'test.DocHere', ['alias.Foo'], None) in aliased_graph + assert ('alias.Foo', 'alias.Foo', [], None) in aliased_graph + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + + pattern = ('<figure class="align-default" id="id1">\n' + '<div class="graphviz">' + '<img src="_images/inheritance-\\w+.png" alt="Inheritance diagram of test.Foo" ' + 'class="inheritance graphviz" /></div>\n<figcaption>\n<p>' + '<span class="caption-text">Test Foo!</span><a class="headerlink" href="#id1" ' + 'title="Link to this image">\xb6</a></p>\n</figcaption>\n</figure>\n') + assert re.search(pattern, content, re.M) + + +def test_import_classes(rootdir): + from sphinx.parsers import Parser, RSTParser + from sphinx.util.i18n import CatalogInfo + + try: + sys.path.append(str(rootdir / 'test-ext-inheritance_diagram')) + from example.sphinx import DummyClass + + # got exception for unknown class or module + with pytest.raises(InheritanceException): + import_classes('unknown', None) + with pytest.raises(InheritanceException): + import_classes('unknown.Unknown', None) + + # got exception InheritanceException for wrong class or module + # not AttributeError (refs: #4019) + with pytest.raises(InheritanceException): + import_classes('unknown', '.') + with pytest.raises(InheritanceException): + import_classes('unknown.Unknown', '.') + with pytest.raises(InheritanceException): + import_classes('.', None) + + # a module having no classes + classes = import_classes('sphinx', None) + assert classes == [] + + classes = import_classes('sphinx', 'foo') + assert classes == [] + + # all of classes in the module + classes = import_classes('sphinx.parsers', None) + assert set(classes) == {Parser, RSTParser} + + # specified class in the module + classes = import_classes('sphinx.parsers.Parser', None) + assert classes == 
[Parser] + + # specified class in current module + classes = import_classes('Parser', 'sphinx.parsers') + assert classes == [Parser] + + # relative module name to current module + classes = import_classes('i18n.CatalogInfo', 'sphinx.util') + assert classes == [CatalogInfo] + + # got exception for functions + with pytest.raises(InheritanceException): + import_classes('encode_uri', 'sphinx.util') + + # import submodule on current module (refs: #3164) + classes = import_classes('sphinx', 'example') + assert classes == [DummyClass] + finally: + sys.path.pop() diff --git a/tests/test_ext_intersphinx.py b/tests/test_ext_intersphinx.py new file mode 100644 index 0000000..82bec9e --- /dev/null +++ b/tests/test_ext_intersphinx.py @@ -0,0 +1,568 @@ +"""Test the intersphinx extension.""" + +import http.server +from unittest import mock + +import pytest +from docutils import nodes + +from sphinx import addnodes +from sphinx.ext.intersphinx import ( + INVENTORY_FILENAME, + _get_safe_url, + _strip_basic_auth, + fetch_inventory, + inspect_main, + load_mappings, + missing_reference, + normalize_intersphinx_mapping, +) +from sphinx.ext.intersphinx import setup as intersphinx_setup + +from .test_util_inventory import inventory_v2, inventory_v2_not_having_version +from .utils import http_server + + +def fake_node(domain, type, target, content, **attrs): + contnode = nodes.emphasis(content, content) + node = addnodes.pending_xref('') + node['reftarget'] = target + node['reftype'] = type + node['refdomain'] = domain + node.attributes.update(attrs) + node += contnode + return node, contnode + + +def reference_check(app, *args, **kwds): + node, contnode = fake_node(*args, **kwds) + return missing_reference(app, app.env, node, contnode) + + +def set_config(app, mapping): + app.config.intersphinx_mapping = mapping + app.config.intersphinx_cache_limit = 0 + app.config.intersphinx_disabled_reftypes = [] + + +@mock.patch('sphinx.ext.intersphinx.InventoryFile') +@mock.patch('sphinx.ext.intersphinx._read_from_url') +def test_fetch_inventory_redirection(_read_from_url, InventoryFile, app, status, warning): # NoQA: PT019 + intersphinx_setup(app) + _read_from_url().readline.return_value = b'# Sphinx inventory version 2' + + # same uri and inv, not redirected + _read_from_url().url = 'http://hostname/' + INVENTORY_FILENAME + fetch_inventory(app, 'http://hostname/', 'http://hostname/' + INVENTORY_FILENAME) + assert 'intersphinx inventory has moved' not in status.getvalue() + assert InventoryFile.load.call_args[0][1] == 'http://hostname/' + + # same uri and inv, redirected + status.seek(0) + status.truncate(0) + _read_from_url().url = 'http://hostname/new/' + INVENTORY_FILENAME + + fetch_inventory(app, 'http://hostname/', 'http://hostname/' + INVENTORY_FILENAME) + assert status.getvalue() == ('intersphinx inventory has moved: ' + 'http://hostname/%s -> http://hostname/new/%s\n' % + (INVENTORY_FILENAME, INVENTORY_FILENAME)) + assert InventoryFile.load.call_args[0][1] == 'http://hostname/new' + + # different uri and inv, not redirected + status.seek(0) + status.truncate(0) + _read_from_url().url = 'http://hostname/new/' + INVENTORY_FILENAME + + fetch_inventory(app, 'http://hostname/', 'http://hostname/new/' + INVENTORY_FILENAME) + assert 'intersphinx inventory has moved' not in status.getvalue() + assert InventoryFile.load.call_args[0][1] == 'http://hostname/' + + # different uri and inv, redirected + status.seek(0) + status.truncate(0) + _read_from_url().url = 'http://hostname/other/' + INVENTORY_FILENAME + + 
fetch_inventory(app, 'http://hostname/', 'http://hostname/new/' + INVENTORY_FILENAME) + assert status.getvalue() == ('intersphinx inventory has moved: ' + 'http://hostname/new/%s -> http://hostname/other/%s\n' % + (INVENTORY_FILENAME, INVENTORY_FILENAME)) + assert InventoryFile.load.call_args[0][1] == 'http://hostname/' + + +def test_missing_reference(tmp_path, app, status, warning): + inv_file = tmp_path / 'inventory' + inv_file.write_bytes(inventory_v2) + set_config(app, { + 'https://docs.python.org/': str(inv_file), + 'py3k': ('https://docs.python.org/py3k/', str(inv_file)), + 'py3krel': ('py3k', str(inv_file)), # relative path + 'py3krelparent': ('../../py3k', str(inv_file)), # relative path, parent dir + }) + + # load the inventory and check if it's done correctly + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + inv = app.env.intersphinx_inventory + + assert inv['py:module']['module2'] == \ + ('foo', '2.0', 'https://docs.python.org/foo.html#module-module2', '-') + + # check resolution when a target is found + rn = reference_check(app, 'py', 'func', 'module1.func', 'foo') + assert isinstance(rn, nodes.reference) + assert rn['refuri'] == 'https://docs.python.org/sub/foo.html#module1.func' + assert rn['reftitle'] == '(in foo v2.0)' + assert rn[0].astext() == 'foo' + + # create unresolvable nodes and check None return value + assert reference_check(app, 'py', 'foo', 'module1.func', 'foo') is None + assert reference_check(app, 'py', 'func', 'foo', 'foo') is None + assert reference_check(app, 'py', 'func', 'foo', 'foo') is None + + # check handling of prefixes + + # prefix given, target found: prefix is stripped + rn = reference_check(app, 'py', 'mod', 'py3k:module2', 'py3k:module2') + assert rn[0].astext() == 'module2' + + # prefix given, but not in title: nothing stripped + rn = reference_check(app, 'py', 'mod', 'py3k:module2', 'module2') + assert rn[0].astext() == 'module2' + + # prefix given, but explicit: nothing stripped + rn = reference_check(app, 'py', 'mod', 'py3k:module2', 'py3k:module2', + refexplicit=True) + assert rn[0].astext() == 'py3k:module2' + + # prefix given, target not found and nonexplicit title: prefix is not stripped + node, contnode = fake_node('py', 'mod', 'py3k:unknown', 'py3k:unknown', + refexplicit=False) + rn = missing_reference(app, app.env, node, contnode) + assert rn is None + assert contnode[0].astext() == 'py3k:unknown' + + # prefix given, target not found and explicit title: nothing is changed + node, contnode = fake_node('py', 'mod', 'py3k:unknown', 'py3k:unknown', + refexplicit=True) + rn = missing_reference(app, app.env, node, contnode) + assert rn is None + assert contnode[0].astext() == 'py3k:unknown' + + # check relative paths + rn = reference_check(app, 'py', 'mod', 'py3krel:module1', 'foo') + assert rn['refuri'] == 'py3k/foo.html#module-module1' + + rn = reference_check(app, 'py', 'mod', 'py3krelparent:module1', 'foo') + assert rn['refuri'] == '../../py3k/foo.html#module-module1' + + rn = reference_check(app, 'py', 'mod', 'py3krel:module1', 'foo', refdoc='sub/dir/test') + assert rn['refuri'] == '../../py3k/foo.html#module-module1' + + rn = reference_check(app, 'py', 'mod', 'py3krelparent:module1', 'foo', + refdoc='sub/dir/test') + assert rn['refuri'] == '../../../../py3k/foo.html#module-module1' + + # check refs of standard domain + rn = reference_check(app, 'std', 'doc', 'docname', 'docname') + assert rn['refuri'] == 'https://docs.python.org/docname.html' + + +def test_missing_reference_pydomain(tmp_path, app, status, 
warning): + inv_file = tmp_path / 'inventory' + inv_file.write_bytes(inventory_v2) + set_config(app, { + 'https://docs.python.org/': str(inv_file), + }) + + # load the inventory and check if it's done correctly + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + + # no context data + kwargs = {} + node, contnode = fake_node('py', 'func', 'func', 'func()', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert rn is None + + # py:module context helps to search objects + kwargs = {'py:module': 'module1'} + node, contnode = fake_node('py', 'func', 'func', 'func()', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert rn.astext() == 'func()' + + # py:attr context helps to search objects + kwargs = {'py:module': 'module1'} + node, contnode = fake_node('py', 'attr', 'Foo.bar', 'Foo.bar', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert rn.astext() == 'Foo.bar' + + # term reference (normal) + node, contnode = fake_node('std', 'term', 'a term', 'a term') + rn = missing_reference(app, app.env, node, contnode) + assert rn.astext() == 'a term' + + # term reference (case insensitive) + node, contnode = fake_node('std', 'term', 'A TERM', 'A TERM') + rn = missing_reference(app, app.env, node, contnode) + assert rn.astext() == 'A TERM' + + +def test_missing_reference_stddomain(tmp_path, app, status, warning): + inv_file = tmp_path / 'inventory' + inv_file.write_bytes(inventory_v2) + set_config(app, { + 'cmd': ('https://docs.python.org/', str(inv_file)), + }) + + # load the inventory and check if it's done correctly + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + + # no context data + kwargs = {} + node, contnode = fake_node('std', 'option', '-l', '-l', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert rn is None + + # std:program context helps to search objects + kwargs = {'std:program': 'ls'} + node, contnode = fake_node('std', 'option', '-l', 'ls -l', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert rn.astext() == 'ls -l' + + # refers inventory by name + kwargs = {} + node, contnode = fake_node('std', 'option', 'cmd:ls -l', '-l', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert rn.astext() == '-l' + + +@pytest.mark.sphinx('html', testroot='ext-intersphinx-cppdomain') +def test_missing_reference_cppdomain(tmp_path, app, status, warning): + inv_file = tmp_path / 'inventory' + inv_file.write_bytes(inventory_v2) + set_config(app, { + 'https://docs.python.org/': str(inv_file), + }) + + # load the inventory and check if it's done correctly + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + + app.build() + html = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<a class="reference external"' + ' href="https://docs.python.org/index.html#cpp_foo_bar"' + ' title="(in foo v2.0)">' + '<code class="xref cpp cpp-class docutils literal notranslate">' + '<span class="pre">Bar</span></code></a>' in html) + assert ('<a class="reference external"' + ' href="https://docs.python.org/index.html#foons"' + ' title="(in foo v2.0)"><span class="n"><span class="pre">foons</span></span></a>' in html) + assert ('<a class="reference external"' + ' href="https://docs.python.org/index.html#foons_bartype"' + ' title="(in foo v2.0)"><span class="n"><span class="pre">bartype</span></span></a>' in html) + + +def test_missing_reference_jsdomain(tmp_path, app, status, warning): + inv_file = tmp_path / 'inventory' + 
inv_file.write_bytes(inventory_v2) + set_config(app, { + 'https://docs.python.org/': str(inv_file), + }) + + # load the inventory and check if it's done correctly + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + + # no context data + kwargs = {} + node, contnode = fake_node('js', 'meth', 'baz', 'baz()', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert rn is None + + # js:module and js:object context helps to search objects + kwargs = {'js:module': 'foo', 'js:object': 'bar'} + node, contnode = fake_node('js', 'meth', 'baz', 'baz()', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert rn.astext() == 'baz()' + + +def test_missing_reference_disabled_domain(tmp_path, app, status, warning): + inv_file = tmp_path / 'inventory' + inv_file.write_bytes(inventory_v2) + set_config(app, { + 'inv': ('https://docs.python.org/', str(inv_file)), + }) + + # load the inventory and check if it's done correctly + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + + def case(*, term, doc, py): + def assert_(rn, expected): + if expected is None: + assert rn is None + else: + assert rn.astext() == expected + + kwargs = {} + + node, contnode = fake_node('std', 'term', 'a term', 'a term', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert_(rn, 'a term' if term else None) + + node, contnode = fake_node('std', 'term', 'inv:a term', 'a term', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert_(rn, 'a term') + + node, contnode = fake_node('std', 'doc', 'docname', 'docname', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert_(rn, 'docname' if doc else None) + + node, contnode = fake_node('std', 'doc', 'inv:docname', 'docname', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert_(rn, 'docname') + + # an arbitrary ref in another domain + node, contnode = fake_node('py', 'func', 'module1.func', 'func()', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert_(rn, 'func()' if py else None) + + node, contnode = fake_node('py', 'func', 'inv:module1.func', 'func()', **kwargs) + rn = missing_reference(app, app.env, node, contnode) + assert_(rn, 'func()') + + # the base case, everything should resolve + assert app.config.intersphinx_disabled_reftypes == [] + case(term=True, doc=True, py=True) + + # disabled a single ref type + app.config.intersphinx_disabled_reftypes = ['std:doc'] + case(term=True, doc=False, py=True) + + # disabled a whole domain + app.config.intersphinx_disabled_reftypes = ['std:*'] + case(term=False, doc=False, py=True) + + # disabled all domains + app.config.intersphinx_disabled_reftypes = ['*'] + case(term=False, doc=False, py=False) + + +def test_inventory_not_having_version(tmp_path, app, status, warning): + inv_file = tmp_path / 'inventory' + inv_file.write_bytes(inventory_v2_not_having_version) + set_config(app, { + 'https://docs.python.org/': str(inv_file), + }) + + # load the inventory and check if it's done correctly + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + + rn = reference_check(app, 'py', 'mod', 'module1', 'foo') + assert isinstance(rn, nodes.reference) + assert rn['refuri'] == 'https://docs.python.org/foo.html#module-module1' + assert rn['reftitle'] == '(in foo)' + assert rn[0].astext() == 'Long Module desc' + + +def test_load_mappings_warnings(tmp_path, app, status, warning): + """ + load_mappings issues a warning if new-style mapping + identifiers are not string + """ + 
inv_file = tmp_path / 'inventory' + inv_file.write_bytes(inventory_v2) + set_config(app, { + 'https://docs.python.org/': str(inv_file), + 'py3k': ('https://docs.python.org/py3k/', str(inv_file)), + 'repoze.workflow': ('http://docs.repoze.org/workflow/', str(inv_file)), + 'django-taggit': ('http://django-taggit.readthedocs.org/en/latest/', + str(inv_file)), + 12345: ('http://www.sphinx-doc.org/en/stable/', str(inv_file)), + }) + + # load the inventory and check if it's done correctly + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + warnings = warning.getvalue().splitlines() + assert len(warnings) == 2 + assert "The pre-Sphinx 1.0 'intersphinx_mapping' format is " in warnings[0] + assert 'intersphinx identifier 12345 is not string. Ignored' in warnings[1] + + +def test_load_mappings_fallback(tmp_path, app, status, warning): + inv_file = tmp_path / 'inventory' + inv_file.write_bytes(inventory_v2) + set_config(app, {}) + + # connect to invalid path + app.config.intersphinx_mapping = { + 'fallback': ('https://docs.python.org/py3k/', '/invalid/inventory/path'), + } + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + assert "failed to reach any of the inventories" in warning.getvalue() + + rn = reference_check(app, 'py', 'func', 'module1.func', 'foo') + assert rn is None + + # clear messages + status.truncate(0) + warning.truncate(0) + + # add fallbacks to mapping + app.config.intersphinx_mapping = { + 'fallback': ('https://docs.python.org/py3k/', ('/invalid/inventory/path', + str(inv_file))), + } + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + assert "encountered some issues with some of the inventories" in status.getvalue() + assert warning.getvalue() == "" + + rn = reference_check(app, 'py', 'func', 'module1.func', 'foo') + assert isinstance(rn, nodes.reference) + + +class TestStripBasicAuth: + """Tests for sphinx.ext.intersphinx._strip_basic_auth()""" + def test_auth_stripped(self): + """basic auth creds stripped from URL containing creds""" + url = 'https://user:12345@domain.com/project/objects.inv' + expected = 'https://domain.com/project/objects.inv' + actual = _strip_basic_auth(url) + assert expected == actual + + def test_no_auth(self): + """url unchanged if param doesn't contain basic auth creds""" + url = 'https://domain.com/project/objects.inv' + expected = 'https://domain.com/project/objects.inv' + actual = _strip_basic_auth(url) + assert expected == actual + + def test_having_port(self): + """basic auth creds correctly stripped from URL containing creds even if URL + contains port""" + url = 'https://user:12345@domain.com:8080/project/objects.inv' + expected = 'https://domain.com:8080/project/objects.inv' + actual = _strip_basic_auth(url) + assert expected == actual + + +def test_getsafeurl_authed(): + """_get_safe_url() with a url with basic auth""" + url = 'https://user:12345@domain.com/project/objects.inv' + expected = 'https://user@domain.com/project/objects.inv' + actual = _get_safe_url(url) + assert expected == actual + + +def test_getsafeurl_authed_having_port(): + """_get_safe_url() with a url with basic auth having port""" + url = 'https://user:12345@domain.com:8080/project/objects.inv' + expected = 'https://user@domain.com:8080/project/objects.inv' + actual = _get_safe_url(url) + assert expected == actual + + +def test_getsafeurl_unauthed(): + """_get_safe_url() with a url without basic auth""" + url = 'https://domain.com/project/objects.inv' + expected = 'https://domain.com/project/objects.inv' + actual = 
_get_safe_url(url) + assert expected == actual + + +def test_inspect_main_noargs(capsys): + """inspect_main interface, without arguments""" + assert inspect_main([]) == 1 + + expected = ( + "Print out an inventory file.\n" + "Error: must specify local path or URL to an inventory file." + ) + stdout, stderr = capsys.readouterr() + assert stdout == "" + assert stderr == expected + "\n" + + +def test_inspect_main_file(capsys, tmp_path): + """inspect_main interface, with file argument""" + inv_file = tmp_path / 'inventory' + inv_file.write_bytes(inventory_v2) + + inspect_main([str(inv_file)]) + + stdout, stderr = capsys.readouterr() + assert stdout.startswith("c:function\n") + assert stderr == "" + + +def test_inspect_main_url(capsys): + """inspect_main interface, with url argument""" + class InventoryHandler(http.server.BaseHTTPRequestHandler): + def do_GET(self): + self.send_response(200, "OK") + self.end_headers() + self.wfile.write(inventory_v2) + + def log_message(*args, **kwargs): + # Silenced. + pass + + url = 'http://localhost:7777/' + INVENTORY_FILENAME + + with http_server(InventoryHandler): + inspect_main([url]) + + stdout, stderr = capsys.readouterr() + assert stdout.startswith("c:function\n") + assert stderr == "" + + +@pytest.mark.sphinx('html', testroot='ext-intersphinx-role') +def test_intersphinx_role(app, warning): + inv_file = app.srcdir / 'inventory' + inv_file.write_bytes(inventory_v2) + app.config.intersphinx_mapping = { + 'inv': ('http://example.org/', str(inv_file)), + } + app.config.intersphinx_cache_limit = 0 + app.config.nitpicky = True + + # load the inventory and check if it's done correctly + normalize_intersphinx_mapping(app, app.config) + load_mappings(app) + + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + wStr = warning.getvalue() + + html = '<a class="reference external" href="http://example.org/{}" title="(in foo v2.0)">' + assert html.format('foo.html#module-module1') in content + assert html.format('foo.html#module-module2') in content + assert "WARNING: external py:mod reference target not found: module3" in wStr + assert "WARNING: external py:mod reference target not found: module10" in wStr + + assert html.format('sub/foo.html#module1.func') in content + assert "WARNING: external py:meth reference target not found: inv:Foo.bar" in wStr + + assert "WARNING: role for external cross-reference not found: py:nope" in wStr + + # default domain + assert html.format('index.html#std_uint8_t') in content + assert "WARNING: role for external cross-reference not found: nope" in wStr + + # std roles without domain prefix + assert html.format('docname.html') in content + assert html.format('index.html#cmdoption-ls-l') in content + + # explicit inventory + assert html.format('cfunc.html#CFunc') in content + assert "WARNING: inventory for external cross-reference not found: invNope" in wStr + + # explicit title + assert html.format('index.html#foons') in content diff --git a/tests/test_ext_math.py b/tests/test_ext_math.py new file mode 100644 index 0000000..d5331f8 --- /dev/null +++ b/tests/test_ext_math.py @@ -0,0 +1,345 @@ +"""Test math extensions.""" + +import re +import shutil +import subprocess +import warnings + +import pytest +from docutils import nodes + +from sphinx.ext.mathjax import MATHJAX_URL +from sphinx.testing.util import assert_node + + +def has_binary(binary): + try: + subprocess.check_output([binary]) + except FileNotFoundError: + return False + except OSError: + pass + return True + + +@pytest.mark.skipif(not 
has_binary('dvipng'), + reason='Requires dvipng" binary') +@pytest.mark.sphinx('html', testroot='ext-math-simple', + confoverrides = {'extensions': ['sphinx.ext.imgmath']}) +def test_imgmath_png(app, status, warning): + app.builder.build_all() + if "LaTeX command 'latex' cannot be run" in warning.getvalue(): + msg = 'LaTeX command "latex" is not available' + raise pytest.skip.Exception(msg) + if "dvipng command 'dvipng' cannot be run" in warning.getvalue(): + msg = 'dvipng command "dvipng" is not available' + raise pytest.skip.Exception(msg) + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + shutil.rmtree(app.outdir) + html = (r'<div class="math">\s*<p>\s*<img src="_images/math/\w+.png"' + r'\s*alt="a\^2\+b\^2=c\^2"/>\s*</p>\s*</div>') + assert re.search(html, content, re.S) + + +@pytest.mark.skipif(not has_binary('dvisvgm'), + reason='Requires dvisvgm" binary') +@pytest.mark.sphinx('html', testroot='ext-math-simple', + confoverrides={'extensions': ['sphinx.ext.imgmath'], + 'imgmath_image_format': 'svg'}) +def test_imgmath_svg(app, status, warning): + app.builder.build_all() + if "LaTeX command 'latex' cannot be run" in warning.getvalue(): + msg = 'LaTeX command "latex" is not available' + raise pytest.skip.Exception(msg) + if "dvisvgm command 'dvisvgm' cannot be run" in warning.getvalue(): + msg = 'dvisvgm command "dvisvgm" is not available' + raise pytest.skip.Exception(msg) + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + shutil.rmtree(app.outdir) + html = (r'<div class="math">\s*<p>\s*<img src="_images/math/\w+.svg"' + r'\s*alt="a\^2\+b\^2=c\^2"/>\s*</p>\s*</div>') + assert re.search(html, content, re.S) + + +@pytest.mark.skipif(not has_binary('dvisvgm'), + reason='Requires dvisvgm" binary') +@pytest.mark.sphinx('html', testroot='ext-math-simple', + confoverrides={'extensions': ['sphinx.ext.imgmath'], + 'imgmath_image_format': 'svg', + 'imgmath_embed': True}) +def test_imgmath_svg_embed(app, status, warning): + app.builder.build_all() + if "LaTeX command 'latex' cannot be run" in warning.getvalue(): + msg = 'LaTeX command "latex" is not available' + raise pytest.skip.Exception(msg) + if "dvisvgm command 'dvisvgm' cannot be run" in warning.getvalue(): + msg = 'dvisvgm command "dvisvgm" is not available' + raise pytest.skip.Exception(msg) + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + shutil.rmtree(app.outdir) + html = r'<img src="data:image/svg\+xml;base64,[\w\+/=]+"' + assert re.search(html, content, re.DOTALL) + + +@pytest.mark.sphinx('html', testroot='ext-math', + confoverrides={'extensions': ['sphinx.ext.mathjax'], + 'mathjax_options': {'integrity': 'sha384-0123456789'}}) +def test_mathjax_options(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + shutil.rmtree(app.outdir) + assert ('<script async="async" integrity="sha384-0123456789" ' + 'src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js">' + '</script>' in content) + + +@pytest.mark.sphinx('html', testroot='ext-math', + confoverrides={'extensions': ['sphinx.ext.mathjax']}) +def test_mathjax_align(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + shutil.rmtree(app.outdir) + html = (r'<div class="math notranslate nohighlight">\s*' + r'\\\[ \\begin\{align\}\\begin\{aligned\}S \&= \\pi r\^2\\\\' + r'V \&= \\frac\{4\}\{3\} \\pi r\^3\\end\{aligned\}\\end\{align\} \\\]</div>') + assert re.search(html, content, re.S) + + 
+@pytest.mark.sphinx('html', testroot='ext-math', + confoverrides={'math_number_all': True, + 'extensions': ['sphinx.ext.mathjax']}) +def test_math_number_all_mathjax(app, status, warning): + app.build() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + html = (r'<div class="math notranslate nohighlight" id="equation-index-0">\s*' + r'<span class="eqno">\(1\)<a .*>\xb6</a></span>\\\[a\^2\+b\^2=c\^2\\\]</div>') + assert re.search(html, content, re.S) + + +@pytest.mark.sphinx('latex', testroot='ext-math', + confoverrides={'extensions': ['sphinx.ext.mathjax']}) +def test_math_number_all_latex(app, status, warning): + app.build() + + content = (app.outdir / 'python.tex').read_text(encoding='utf8') + macro = (r'\\begin{equation\*}\s*' + r'\\begin{split}a\^2\+b\^2=c\^2\\end{split}\s*' + r'\\end{equation\*}') + assert re.search(macro, content, re.S) + + macro = r'Inline \\\(E=mc\^2\\\)' + assert re.search(macro, content, re.S) + + macro = (r'\\begin{equation\*}\s*' + r'\\begin{split}e\^{i\\pi}\+1=0\\end{split}\s+' + r'\\end{equation\*}') + assert re.search(macro, content, re.S) + + macro = (r'\\begin{align\*}\\!\\begin{aligned}\s*' + r'S &= \\pi r\^2\\\\\s*' + r'V &= \\frac\{4}\{3} \\pi r\^3\\\\\s*' + r'\\end{aligned}\\end{align\*}') + assert re.search(macro, content, re.S) + + macro = r'Referencing equation \\eqref{equation:math:foo}.' + assert re.search(macro, content, re.S) + + +@pytest.mark.sphinx('html', testroot='ext-math', + confoverrides={'extensions': ['sphinx.ext.mathjax'], + 'math_eqref_format': 'Eq.{number}'}) +def test_math_eqref_format_html(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'math.html').read_text(encoding='utf8') + html = ('<p>Referencing equation <a class="reference internal" ' + 'href="#equation-foo">Eq.1</a> and <a class="reference internal" ' + 'href="#equation-foo">Eq.1</a>.</p>') + assert html in content + + +@pytest.mark.sphinx('latex', testroot='ext-math', + confoverrides={'extensions': ['sphinx.ext.mathjax'], + 'math_eqref_format': 'Eq.{number}'}) +def test_math_eqref_format_latex(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'python.tex').read_text(encoding='utf8') + macro = (r'Referencing equation Eq.\\ref{equation:math:foo} and ' + r'Eq.\\ref{equation:math:foo}.') + assert re.search(macro, content, re.S) + + +@pytest.mark.sphinx('html', testroot='ext-math', + confoverrides={'extensions': ['sphinx.ext.mathjax'], + 'numfig': True, + 'math_numfig': True}) +def test_mathjax_numfig_html(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'math.html').read_text(encoding='utf8') + html = ('<div class="math notranslate nohighlight" id="equation-math-0">\n' + '<span class="eqno">(1.2)') + assert html in content + html = ('<p>Referencing equation <a class="reference internal" ' + 'href="#equation-foo">(1.1)</a> and ' + '<a class="reference internal" href="#equation-foo">(1.1)</a>.</p>') + assert html in content + + +@pytest.mark.sphinx('html', testroot='ext-math', + confoverrides={'extensions': ['sphinx.ext.imgmath'], + 'numfig': True, + 'numfig_secnum_depth': 0, + 'math_numfig': True}) +def test_imgmath_numfig_html(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'page.html').read_text(encoding='utf8') + html = '<span class="eqno">(3)<a class="headerlink" href="#equation-bar"' + assert html in content + html = ('<p>Referencing equations <a class="reference internal" ' + 'href="math.html#equation-foo">(1)</a> and ' + '<a 
class="reference internal" href="#equation-bar">(3)</a>.</p>') + assert html in content + + +@pytest.mark.sphinx('dummy', testroot='ext-math-compat') +def test_math_compat(app, status, warning): + with warnings.catch_warnings(record=True): + app.builder.build_all() + doctree = app.env.get_and_resolve_doctree('index', app.builder) + + assert_node(doctree, + [nodes.document, nodes.section, (nodes.title, + [nodes.section, (nodes.title, + nodes.paragraph)], + nodes.section)]) + assert_node(doctree[0][1][1], + ('Inline: ', + [nodes.math, "E=mc^2"], + '\nInline my math: ', + [nodes.math, "E = mc^2"])) + assert_node(doctree[0][2], + ([nodes.title, "block"], + [nodes.math_block, "a^2+b^2=c^2\n\n"], + [nodes.paragraph, "Second math"], + [nodes.math_block, "e^{i\\pi}+1=0\n\n"], + [nodes.paragraph, "Multi math equations"], + [nodes.math_block, "E = mc^2"])) + + +@pytest.mark.sphinx('html', testroot='ext-math', + confoverrides={'extensions': ['sphinx.ext.mathjax'], + 'mathjax3_config': {'extensions': ['tex2jax.js']}}) +def test_mathjax3_config(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert MATHJAX_URL in content + assert ('<script defer="defer" src="%s">' % MATHJAX_URL in content) + assert ('<script>window.MathJax = {"extensions": ["tex2jax.js"]}</script>' in content) + + +@pytest.mark.sphinx('html', testroot='ext-math', + confoverrides={'extensions': ['sphinx.ext.mathjax'], + 'mathjax2_config': {'extensions': ['tex2jax.js']}}) +def test_mathjax2_config(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<script async="async" src="%s">' % MATHJAX_URL in content) + assert ('<script type="text/x-mathjax-config">' + 'MathJax.Hub.Config({"extensions": ["tex2jax.js"]})' + '</script>' in content) + + +@pytest.mark.sphinx('html', testroot='ext-math', + confoverrides={'extensions': ['sphinx.ext.mathjax'], + 'mathjax_options': {'async': 'async'}, + 'mathjax3_config': {'extensions': ['tex2jax.js']}}) +def test_mathjax_options_async_for_mathjax3(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert MATHJAX_URL in content + assert ('<script async="async" src="%s">' % MATHJAX_URL in content) + + +@pytest.mark.sphinx('html', testroot='ext-math', + confoverrides={'extensions': ['sphinx.ext.mathjax'], + 'mathjax_options': {'defer': 'defer'}, + 'mathjax2_config': {'extensions': ['tex2jax.js']}}) +def test_mathjax_options_defer_for_mathjax2(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<script defer="defer" src="%s">' % MATHJAX_URL in content) + + +@pytest.mark.sphinx( + 'html', testroot='ext-math', + confoverrides={ + 'extensions': ['sphinx.ext.mathjax'], + 'mathjax_path': 'MathJax.js', + }, +) +def test_mathjax_path(app): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<script async="async" src="_static/MathJax.js"></script>' in content + + +@pytest.mark.sphinx( + 'html', testroot='ext-math', + confoverrides={ + 'extensions': ['sphinx.ext.mathjax'], + 'mathjax_path': 'MathJax.js?config=scipy-mathjax', + }, +) +def test_mathjax_path_config(app): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<script async="async" src="_static/MathJax.js?config=scipy-mathjax"></script>' in content + + 
+@pytest.mark.sphinx('html', testroot='ext-math', + confoverrides={'extensions': ['sphinx.ext.mathjax']}) +def test_mathjax_is_installed_only_if_document_having_math(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert MATHJAX_URL in content + + content = (app.outdir / 'nomath.html').read_text(encoding='utf8') + assert MATHJAX_URL not in content + + +@pytest.mark.sphinx('html', testroot='basic', + confoverrides={'extensions': ['sphinx.ext.mathjax']}) +def test_mathjax_is_not_installed_if_no_equations(app, status, warning): + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert 'MathJax.js' not in content + + +@pytest.mark.sphinx('html', testroot='ext-math', + confoverrides={'extensions': ['sphinx.ext.mathjax']}) +def test_mathjax_is_installed_if_no_equations_when_forced(app, status, warning): + app.set_html_assets_policy('always') + app.builder.build_all() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert MATHJAX_URL in content + + content = (app.outdir / 'nomath.html').read_text(encoding='utf8') + assert MATHJAX_URL in content diff --git a/tests/test_ext_napoleon.py b/tests/test_ext_napoleon.py new file mode 100644 index 0000000..00b7ac1 --- /dev/null +++ b/tests/test_ext_napoleon.py @@ -0,0 +1,218 @@ +"""Tests for :mod:`sphinx.ext.napoleon.__init__` module.""" + +import functools +from collections import namedtuple +from unittest import mock + +import pytest + +from sphinx.application import Sphinx +from sphinx.ext.napoleon import Config, _process_docstring, _skip_member, setup + + +def simple_decorator(f): + """ + A simple decorator that does nothing, for tests to use. + """ + @functools.wraps(f) + def wrapper(*args, **kwargs): + return f(*args, **kwargs) + return wrapper + + +def _private_doc(): + """module._private_doc.DOCSTRING""" + pass + + +def _private_undoc(): + pass + + +def __special_doc__(): + """module.__special_doc__.DOCSTRING""" + pass + + +def __special_undoc__(): + pass + + +class SampleClass: + def _private_doc(self): + """SampleClass._private_doc.DOCSTRING""" + pass + + def _private_undoc(self): + pass + + def __special_doc__(self): + """SampleClass.__special_doc__.DOCSTRING""" + pass + + def __special_undoc__(self): + pass + + @simple_decorator + def __decorated_func__(self): + """doc""" + pass + + +class SampleError(Exception): + def _private_doc(self): + """SampleError._private_doc.DOCSTRING""" + pass + + def _private_undoc(self): + pass + + def __special_doc__(self): + """SampleError.__special_doc__.DOCSTRING""" + pass + + def __special_undoc__(self): + pass + + +SampleNamedTuple = namedtuple('SampleNamedTuple', 'user_id block_type def_id') + + +class TestProcessDocstring: + def test_modify_in_place(self): + lines = ['Summary line.', + '', + 'Args:', + ' arg1: arg1 description'] + app = mock.Mock() + app.config = Config() + _process_docstring(app, 'class', 'SampleClass', SampleClass, + mock.Mock(), lines) + + expected = ['Summary line.', + '', + ':param arg1: arg1 description', + ''] + assert expected == lines + + +class TestSetup: + def test_unknown_app_type(self): + setup(object()) + + def test_add_config_values(self): + app = mock.Mock(Sphinx) + setup(app) + for name in Config._config_values: + has_config = False + for method_name, args, _kwargs in app.method_calls: + if ( + method_name == 'add_config_value' and + args[0] == name + ): + has_config = True + if not has_config: + pytest.fail('Config value was not 
added to app %s' % name) + + has_process_docstring = False + has_skip_member = False + for method_name, args, _kwargs in app.method_calls: + if method_name == 'connect': + if ( + args[0] == 'autodoc-process-docstring' and + args[1] == _process_docstring + ): + has_process_docstring = True + elif ( + args[0] == 'autodoc-skip-member' and + args[1] == _skip_member + ): + has_skip_member = True + if not has_process_docstring: + pytest.fail('autodoc-process-docstring never connected') + if not has_skip_member: + pytest.fail('autodoc-skip-member never connected') + + +class TestSkipMember: + def assert_skip(self, what, member, obj, expect_default_skip, config_name): + skip = True + app = mock.Mock() + app.config = Config() + setattr(app.config, config_name, True) + if expect_default_skip: + assert None is _skip_member(app, what, member, obj, skip, mock.Mock()) + else: + assert _skip_member(app, what, member, obj, skip, mock.Mock()) is False + setattr(app.config, config_name, False) + assert None is _skip_member(app, what, member, obj, skip, mock.Mock()) + + def test_namedtuple(self): + # Since python 3.7, namedtuple._asdict() has not been documented + # because there is no way to check the method is a member of the + # namedtuple class. This testcase confirms only it does not + # raise an error on building document (refs: #1455) + self.assert_skip('class', '_asdict', + SampleNamedTuple._asdict, True, + 'napoleon_include_private_with_doc') + + def test_class_private_doc(self): + self.assert_skip('class', '_private_doc', + SampleClass._private_doc, False, + 'napoleon_include_private_with_doc') + + def test_class_private_undoc(self): + self.assert_skip('class', '_private_undoc', + SampleClass._private_undoc, True, + 'napoleon_include_private_with_doc') + + def test_class_special_doc(self): + self.assert_skip('class', '__special_doc__', + SampleClass.__special_doc__, False, + 'napoleon_include_special_with_doc') + + def test_class_special_undoc(self): + self.assert_skip('class', '__special_undoc__', + SampleClass.__special_undoc__, True, + 'napoleon_include_special_with_doc') + + def test_class_decorated_doc(self): + self.assert_skip('class', '__decorated_func__', + SampleClass.__decorated_func__, False, + 'napoleon_include_special_with_doc') + + def test_exception_private_doc(self): + self.assert_skip('exception', '_private_doc', + SampleError._private_doc, False, + 'napoleon_include_private_with_doc') + + def test_exception_private_undoc(self): + self.assert_skip('exception', '_private_undoc', + SampleError._private_undoc, True, + 'napoleon_include_private_with_doc') + + def test_exception_special_doc(self): + self.assert_skip('exception', '__special_doc__', + SampleError.__special_doc__, False, + 'napoleon_include_special_with_doc') + + def test_exception_special_undoc(self): + self.assert_skip('exception', '__special_undoc__', + SampleError.__special_undoc__, True, + 'napoleon_include_special_with_doc') + + def test_module_private_doc(self): + self.assert_skip('module', '_private_doc', _private_doc, False, + 'napoleon_include_private_with_doc') + + def test_module_private_undoc(self): + self.assert_skip('module', '_private_undoc', _private_undoc, True, + 'napoleon_include_private_with_doc') + + def test_module_special_doc(self): + self.assert_skip('module', '__special_doc__', __special_doc__, False, + 'napoleon_include_special_with_doc') + + def test_module_special_undoc(self): + self.assert_skip('module', '__special_undoc__', __special_undoc__, True, + 'napoleon_include_special_with_doc') diff 
--git a/tests/test_ext_napoleon_docstring.py b/tests/test_ext_napoleon_docstring.py new file mode 100644 index 0000000..87fad61 --- /dev/null +++ b/tests/test_ext_napoleon_docstring.py @@ -0,0 +1,2660 @@ +"""Tests for :mod:`sphinx.ext.napoleon.docstring` module.""" + +import re +from collections import namedtuple +from inspect import cleandoc +from textwrap import dedent +from unittest import mock + +import pytest + +from sphinx.ext.napoleon import Config +from sphinx.ext.napoleon.docstring import ( + GoogleDocstring, + NumpyDocstring, + _convert_numpy_type_spec, + _recombine_set_tokens, + _token_type, + _tokenize_type_spec, +) + +from .ext_napoleon_pep526_data_google import PEP526GoogleClass +from .ext_napoleon_pep526_data_numpy import PEP526NumpyClass + + +class NamedtupleSubclass(namedtuple('NamedtupleSubclass', ('attr1', 'attr2'))): + """Sample namedtuple subclass + + Attributes + ---------- + attr1 : Arbitrary type + Quick description of attr1 + attr2 : Another arbitrary type + Quick description of attr2 + attr3 : Type + + Adds a newline after the type + + """ + # To avoid creating a dict, as a namedtuple doesn't have it: + __slots__ = () + + def __new__(cls, attr1, attr2=None): + return super().__new__(cls, attr1, attr2) + + +class TestNamedtupleSubclass: + def test_attributes_docstring(self): + config = Config() + actual = str(NumpyDocstring(cleandoc(NamedtupleSubclass.__doc__), + config=config, app=None, what='class', + name='NamedtupleSubclass', obj=NamedtupleSubclass)) + expected = """\ +Sample namedtuple subclass + +.. attribute:: attr1 + + Quick description of attr1 + + :type: Arbitrary type + +.. attribute:: attr2 + + Quick description of attr2 + + :type: Another arbitrary type + +.. attribute:: attr3 + + Adds a newline after the type + + :type: Type +""" + + assert expected == actual + + +class TestInlineAttribute: + inline_google_docstring = ('inline description with ' + '``a : in code``, ' + 'a :ref:`reference`, ' + 'a `link <https://foo.bar>`_, ' + 'a :meta public:, ' + 'a :meta field: value and ' + 'an host:port and HH:MM strings.') + + @staticmethod + def _docstring(source): + rst = GoogleDocstring(source, config=Config(), app=None, what='attribute', name='some_data', obj=0) + return str(rst) + + def test_class_data_member(self): + source = 'data member description:\n\n- a: b' + actual = self._docstring(source).splitlines() + assert actual == ['data member description:', '', '- a: b'] + + def test_class_data_member_inline(self): + source = f'CustomType: {self.inline_google_docstring}' + actual = self._docstring(source).splitlines() + assert actual == [self.inline_google_docstring, '', ':type: CustomType'] + + def test_class_data_member_inline_no_type(self): + source = self.inline_google_docstring + actual = self._docstring(source).splitlines() + assert actual == [source] + + def test_class_data_member_inline_ref_in_type(self): + source = f':class:`int`: {self.inline_google_docstring}' + actual = self._docstring(source).splitlines() + assert actual == [self.inline_google_docstring, '', ':type: :class:`int`'] + + +class TestGoogleDocstring: + docstrings = [( + """Single line summary""", + """Single line summary""", + ), ( + """ + Single line summary + + Extended description + + """, + """ + Single line summary + + Extended description + """, + ), ( + """ + Single line summary + + Args: + arg1(str):Extended + description of arg1 + """, + """ + Single line summary + + :Parameters: **arg1** (*str*) -- Extended + description of arg1 + """, + ), ( + """ + Single line summary + 
+ Args: + arg1(str):Extended + description of arg1 + arg2 ( int ) : Extended + description of arg2 + + Keyword Args: + kwarg1(str):Extended + description of kwarg1 + kwarg2 ( int ) : Extended + description of kwarg2""", + """ + Single line summary + + :Parameters: * **arg1** (*str*) -- Extended + description of arg1 + * **arg2** (*int*) -- Extended + description of arg2 + + :Keyword Arguments: * **kwarg1** (*str*) -- Extended + description of kwarg1 + * **kwarg2** (*int*) -- Extended + description of kwarg2 + """, + ), ( + """ + Single line summary + + Arguments: + arg1(str):Extended + description of arg1 + arg2 ( int ) : Extended + description of arg2 + + Keyword Arguments: + kwarg1(str):Extended + description of kwarg1 + kwarg2 ( int ) : Extended + description of kwarg2""", + """ + Single line summary + + :Parameters: * **arg1** (*str*) -- Extended + description of arg1 + * **arg2** (*int*) -- Extended + description of arg2 + + :Keyword Arguments: * **kwarg1** (*str*) -- Extended + description of kwarg1 + * **kwarg2** (*int*) -- Extended + description of kwarg2 + """, + ), ( + """ + Single line summary + + Return: + str:Extended + description of return value + """, + """ + Single line summary + + :returns: *str* -- Extended + description of return value + """, + ), ( + """ + Single line summary + + Returns: + str:Extended + description of return value + """, + """ + Single line summary + + :returns: *str* -- Extended + description of return value + """, + ), ( + """ + Single line summary + + Returns: + Extended + description of return value + """, + """ + Single line summary + + :returns: Extended + description of return value + """, + ), ( + """ + Single line summary + + Returns: + Extended + """, + """ + Single line summary + + :returns: Extended + """, + ), ( + """ + Single line summary + + Args: + arg1(str):Extended + description of arg1 + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. + """, + """ + Single line summary + + :Parameters: * **arg1** (*str*) -- Extended + description of arg1 + * **\\*args** -- Variable length argument list. + * **\\*\\*kwargs** -- Arbitrary keyword arguments. 
+ """, + ), ( + """ + Single line summary + + Args: + arg1 (list(int)): Description + arg2 (list[int]): Description + arg3 (dict(str, int)): Description + arg4 (dict[str, int]): Description + """, + """ + Single line summary + + :Parameters: * **arg1** (*list(int)*) -- Description + * **arg2** (*list[int]*) -- Description + * **arg3** (*dict(str, int)*) -- Description + * **arg4** (*dict[str, int]*) -- Description + """, + ), ( + """ + Single line summary + + Receive: + arg1 (list(int)): Description + arg2 (list[int]): Description + """, + """ + Single line summary + + :Receives: * **arg1** (*list(int)*) -- Description + * **arg2** (*list[int]*) -- Description + """, + ), ( + """ + Single line summary + + Receives: + arg1 (list(int)): Description + arg2 (list[int]): Description + """, + """ + Single line summary + + :Receives: * **arg1** (*list(int)*) -- Description + * **arg2** (*list[int]*) -- Description + """, + ), ( + """ + Single line summary + + Yield: + str:Extended + description of yielded value + """, + """ + Single line summary + + :Yields: *str* -- Extended + description of yielded value + """, + ), ( + """ + Single line summary + + Yields: + Extended + description of yielded value + """, + """ + Single line summary + + :Yields: Extended + description of yielded value + """, + ), ( + """ + Single line summary + + Args: + + arg1 (list of str): Extended + description of arg1. + arg2 (tuple of int): Extended + description of arg2. + arg3 (tuple of list of float): Extended + description of arg3. + arg4 (int, float, or list of bool): Extended + description of arg4. + arg5 (list of int, float, or bool): Extended + description of arg5. + arg6 (list of int or float): Extended + description of arg6. + """, + """ + Single line summary + + :Parameters: * **arg1** (*list of str*) -- Extended + description of arg1. + * **arg2** (*tuple of int*) -- Extended + description of arg2. + * **arg3** (*tuple of list of float*) -- Extended + description of arg3. + * **arg4** (*int, float, or list of bool*) -- Extended + description of arg4. + * **arg5** (*list of int, float, or bool*) -- Extended + description of arg5. + * **arg6** (*list of int or float*) -- Extended + description of arg6. + """, + )] + + def test_sphinx_admonitions(self): + admonition_map = { + 'Attention': 'attention', + 'Caution': 'caution', + 'Danger': 'danger', + 'Error': 'error', + 'Hint': 'hint', + 'Important': 'important', + 'Note': 'note', + 'Tip': 'tip', + 'Todo': 'todo', + 'Warning': 'warning', + 'Warnings': 'warning', + } + config = Config() + for section, admonition in admonition_map.items(): + # Multiline + actual = str(GoogleDocstring(f"{section}:\n" + " this is the first line\n" + "\n" + " and this is the second line\n", + config)) + expect = (f".. {admonition}::\n" + "\n" + " this is the first line\n" + " \n" + " and this is the second line\n" + ) + assert expect == actual + + # Single line + actual = str(GoogleDocstring(f"{section}:\n" + " this is a single line\n", + config)) + expect = f".. {admonition}:: this is a single line\n" + assert expect == actual + + def test_docstrings(self): + config = Config( + napoleon_use_param=False, + napoleon_use_rtype=False, + napoleon_use_keyword=False, + ) + for docstring, expected in self.docstrings: + actual = str(GoogleDocstring(dedent(docstring), config)) + expected = dedent(expected) + assert expected == actual + + def test_parameters_with_class_reference(self): + docstring = """\ +Construct a new XBlock. + +This class should only be used by runtimes. 
+ +Arguments: + runtime (:class:`~typing.Dict`\\[:class:`int`,:class:`str`\\]): Use it to + access the environment. It is available in XBlock code + as ``self.runtime``. + + field_data (:class:`FieldData`): Interface used by the XBlock + fields to access their data from wherever it is persisted. + + scope_ids (:class:`ScopeIds`): Identifiers needed to resolve scopes. + +""" + + actual = str(GoogleDocstring(docstring)) + expected = """\ +Construct a new XBlock. + +This class should only be used by runtimes. + +:param runtime: Use it to + access the environment. It is available in XBlock code + as ``self.runtime``. +:type runtime: :class:`~typing.Dict`\\[:class:`int`,:class:`str`\\] +:param field_data: Interface used by the XBlock + fields to access their data from wherever it is persisted. +:type field_data: :class:`FieldData` +:param scope_ids: Identifiers needed to resolve scopes. +:type scope_ids: :class:`ScopeIds` +""" + assert expected == actual + + def test_attributes_with_class_reference(self): + docstring = """\ +Attributes: + in_attr(:class:`numpy.ndarray`): super-dooper attribute +""" + + actual = str(GoogleDocstring(docstring)) + expected = """\ +.. attribute:: in_attr + + super-dooper attribute + + :type: :class:`numpy.ndarray` +""" + assert expected == actual + + docstring = """\ +Attributes: + in_attr(numpy.ndarray): super-dooper attribute +""" + + actual = str(GoogleDocstring(docstring)) + expected = """\ +.. attribute:: in_attr + + super-dooper attribute + + :type: numpy.ndarray +""" + + def test_attributes_with_use_ivar(self): + docstring = """\ +Attributes: + foo (int): blah blah + bar (str): blah blah +""" + + config = Config(napoleon_use_ivar=True) + actual = str(GoogleDocstring(docstring, config, obj=self.__class__)) + expected = """\ +:ivar foo: blah blah +:vartype foo: int +:ivar bar: blah blah +:vartype bar: str +""" + assert expected == actual + + def test_code_block_in_returns_section(self): + docstring = """ +Returns: + foobar: foo:: + + codecode + codecode +""" + expected = """ +:returns: + + foo:: + + codecode + codecode +:rtype: foobar +""" + actual = str(GoogleDocstring(docstring)) + assert expected == actual + + def test_colon_in_return_type(self): + docstring = """Example property. + +Returns: + :py:class:`~.module.submodule.SomeClass`: an example instance + if available, None if not available. +""" + expected = """Example property. + +:returns: an example instance + if available, None if not available. +:rtype: :py:class:`~.module.submodule.SomeClass` +""" + actual = str(GoogleDocstring(docstring)) + assert expected == actual + + def test_xrefs_in_return_type(self): + docstring = """Example Function + +Returns: + :class:`numpy.ndarray`: A :math:`n \\times 2` array containing + a bunch of math items +""" + expected = """Example Function + +:returns: A :math:`n \\times 2` array containing + a bunch of math items +:rtype: :class:`numpy.ndarray` +""" + actual = str(GoogleDocstring(docstring)) + assert expected == actual + + def test_raises_types(self): + docstrings = [(""" +Example Function + +Raises: + RuntimeError: + A setting wasn't specified, or was invalid. + ValueError: + Something something value error. + :py:class:`AttributeError` + errors for missing attributes. + ~InvalidDimensionsError + If the dimensions couldn't be parsed. + `InvalidArgumentsError` + If the arguments are invalid. + :exc:`~ValueError` + If the arguments are wrong. + +""", """ +Example Function + +:raises RuntimeError: A setting wasn't specified, or was invalid. 
+:raises ValueError: Something something value error. +:raises AttributeError: errors for missing attributes. +:raises ~InvalidDimensionsError: If the dimensions couldn't be parsed. +:raises InvalidArgumentsError: If the arguments are invalid. +:raises ~ValueError: If the arguments are wrong. +"""), + ################################ + (""" +Example Function + +Raises: + InvalidDimensionsError + +""", """ +Example Function + +:raises InvalidDimensionsError: +"""), + ################################ + (""" +Example Function + +Raises: + Invalid Dimensions Error + +""", """ +Example Function + +:raises Invalid Dimensions Error: +"""), + ################################ + (""" +Example Function + +Raises: + Invalid Dimensions Error: With description + +""", """ +Example Function + +:raises Invalid Dimensions Error: With description +"""), + ################################ + (""" +Example Function + +Raises: + InvalidDimensionsError: If the dimensions couldn't be parsed. + +""", """ +Example Function + +:raises InvalidDimensionsError: If the dimensions couldn't be parsed. +"""), + ################################ + (""" +Example Function + +Raises: + Invalid Dimensions Error: If the dimensions couldn't be parsed. + +""", """ +Example Function + +:raises Invalid Dimensions Error: If the dimensions couldn't be parsed. +"""), + ################################ + (""" +Example Function + +Raises: + If the dimensions couldn't be parsed. + +""", """ +Example Function + +:raises If the dimensions couldn't be parsed.: +"""), + ################################ + (""" +Example Function + +Raises: + :class:`exc.InvalidDimensionsError` + +""", """ +Example Function + +:raises exc.InvalidDimensionsError: +"""), + ################################ + (""" +Example Function + +Raises: + :class:`exc.InvalidDimensionsError`: If the dimensions couldn't be parsed. + +""", """ +Example Function + +:raises exc.InvalidDimensionsError: If the dimensions couldn't be parsed. +"""), + ################################ + (""" +Example Function + +Raises: + :class:`exc.InvalidDimensionsError`: If the dimensions couldn't be parsed, + then a :class:`exc.InvalidDimensionsError` will be raised. + +""", """ +Example Function + +:raises exc.InvalidDimensionsError: If the dimensions couldn't be parsed, + then a :class:`exc.InvalidDimensionsError` will be raised. +"""), + ################################ + (""" +Example Function + +Raises: + :class:`exc.InvalidDimensionsError`: If the dimensions couldn't be parsed. + :class:`exc.InvalidArgumentsError`: If the arguments are invalid. + +""", """ +Example Function + +:raises exc.InvalidDimensionsError: If the dimensions couldn't be parsed. +:raises exc.InvalidArgumentsError: If the arguments are invalid. +"""), + ################################ + (""" +Example Function + +Raises: + :class:`exc.InvalidDimensionsError` + :class:`exc.InvalidArgumentsError` + +""", """ +Example Function + +:raises exc.InvalidDimensionsError: +:raises exc.InvalidArgumentsError: +""")] + for docstring, expected in docstrings: + actual = str(GoogleDocstring(docstring)) + assert expected == actual + + def test_kwargs_in_arguments(self): + docstring = """Allows to create attributes binded to this device. + +Some other paragraph. + +Code sample for usage:: + + dev.bind(loopback=Loopback) + dev.loopback.configure() + +Arguments: + **kwargs: name/class pairs that will create resource-managers + bound as instance attributes to this instance. See code + example above. 
+""" + expected = """Allows to create attributes binded to this device. + +Some other paragraph. + +Code sample for usage:: + + dev.bind(loopback=Loopback) + dev.loopback.configure() + +:param \\*\\*kwargs: name/class pairs that will create resource-managers + bound as instance attributes to this instance. See code + example above. +""" + actual = str(GoogleDocstring(docstring)) + assert expected == actual + + def test_section_header_formatting(self): + docstrings = [(""" +Summary line + +Example: + Multiline reStructuredText + literal code block + +""", """ +Summary line + +.. rubric:: Example + +Multiline reStructuredText +literal code block +"""), + ################################ + (""" +Summary line + +Example:: + + Multiline reStructuredText + literal code block + +""", """ +Summary line + +Example:: + + Multiline reStructuredText + literal code block +"""), + ################################ + (""" +Summary line + +:Example: + + Multiline reStructuredText + literal code block + +""", """ +Summary line + +:Example: + + Multiline reStructuredText + literal code block +""")] + for docstring, expected in docstrings: + actual = str(GoogleDocstring(docstring)) + assert expected == actual + + def test_list_in_parameter_description(self): + docstring = """One line summary. + +Parameters: + no_list (int): + one_bullet_empty (int): + * + one_bullet_single_line (int): + - first line + one_bullet_two_lines (int): + + first line + continued + two_bullets_single_line (int): + - first line + - second line + two_bullets_two_lines (int): + * first line + continued + * second line + continued + one_enumeration_single_line (int): + 1. first line + one_enumeration_two_lines (int): + 1) first line + continued + two_enumerations_one_line (int): + (iii) first line + (iv) second line + two_enumerations_two_lines (int): + a. first line + continued + b. second line + continued + one_definition_one_line (int): + item 1 + first line + one_definition_two_lines (int): + item 1 + first line + continued + two_definitions_one_line (int): + item 1 + first line + item 2 + second line + two_definitions_two_lines (int): + item 1 + first line + continued + item 2 + second line + continued + one_definition_blank_line (int): + item 1 + + first line + + extra first line + + two_definitions_blank_lines (int): + item 1 + + first line + + extra first line + + item 2 + + second line + + extra second line + + definition_after_inline_text (int): text line + + item 1 + first line + + definition_after_normal_text (int): + text line + + item 1 + first line +""" + + expected = """One line summary. + +:param no_list: +:type no_list: int +:param one_bullet_empty: + * +:type one_bullet_empty: int +:param one_bullet_single_line: + - first line +:type one_bullet_single_line: int +:param one_bullet_two_lines: + + first line + continued +:type one_bullet_two_lines: int +:param two_bullets_single_line: + - first line + - second line +:type two_bullets_single_line: int +:param two_bullets_two_lines: + * first line + continued + * second line + continued +:type two_bullets_two_lines: int +:param one_enumeration_single_line: + 1. first line +:type one_enumeration_single_line: int +:param one_enumeration_two_lines: + 1) first line + continued +:type one_enumeration_two_lines: int +:param two_enumerations_one_line: + (iii) first line + (iv) second line +:type two_enumerations_one_line: int +:param two_enumerations_two_lines: + a. first line + continued + b. 
second line + continued +:type two_enumerations_two_lines: int +:param one_definition_one_line: + item 1 + first line +:type one_definition_one_line: int +:param one_definition_two_lines: + item 1 + first line + continued +:type one_definition_two_lines: int +:param two_definitions_one_line: + item 1 + first line + item 2 + second line +:type two_definitions_one_line: int +:param two_definitions_two_lines: + item 1 + first line + continued + item 2 + second line + continued +:type two_definitions_two_lines: int +:param one_definition_blank_line: + item 1 + + first line + + extra first line +:type one_definition_blank_line: int +:param two_definitions_blank_lines: + item 1 + + first line + + extra first line + + item 2 + + second line + + extra second line +:type two_definitions_blank_lines: int +:param definition_after_inline_text: text line + + item 1 + first line +:type definition_after_inline_text: int +:param definition_after_normal_text: text line + + item 1 + first line +:type definition_after_normal_text: int +""" + config = Config(napoleon_use_param=True) + actual = str(GoogleDocstring(docstring, config)) + assert expected == actual + + expected = """One line summary. + +:Parameters: * **no_list** (*int*) + * **one_bullet_empty** (*int*) -- + + * + * **one_bullet_single_line** (*int*) -- + + - first line + * **one_bullet_two_lines** (*int*) -- + + + first line + continued + * **two_bullets_single_line** (*int*) -- + + - first line + - second line + * **two_bullets_two_lines** (*int*) -- + + * first line + continued + * second line + continued + * **one_enumeration_single_line** (*int*) -- + + 1. first line + * **one_enumeration_two_lines** (*int*) -- + + 1) first line + continued + * **two_enumerations_one_line** (*int*) -- + + (iii) first line + (iv) second line + * **two_enumerations_two_lines** (*int*) -- + + a. first line + continued + b. second line + continued + * **one_definition_one_line** (*int*) -- + + item 1 + first line + * **one_definition_two_lines** (*int*) -- + + item 1 + first line + continued + * **two_definitions_one_line** (*int*) -- + + item 1 + first line + item 2 + second line + * **two_definitions_two_lines** (*int*) -- + + item 1 + first line + continued + item 2 + second line + continued + * **one_definition_blank_line** (*int*) -- + + item 1 + + first line + + extra first line + * **two_definitions_blank_lines** (*int*) -- + + item 1 + + first line + + extra first line + + item 2 + + second line + + extra second line + * **definition_after_inline_text** (*int*) -- text line + + item 1 + first line + * **definition_after_normal_text** (*int*) -- text line + + item 1 + first line +""" + config = Config(napoleon_use_param=False) + actual = str(GoogleDocstring(docstring, config)) + assert expected == actual + + def test_custom_generic_sections(self): + + docstrings = (("""\ +Really Important Details: + You should listen to me! +""", """.. rubric:: Really Important Details + +You should listen to me! +"""), + ("""\ +Sooper Warning: + Stop hitting yourself! 
+""", """:Warns: **Stop hitting yourself!** +"""), + ("""\ +Params Style: + arg1 (int): Description of arg1 + arg2 (str): Description of arg2 + +""", """\ +:Params Style: * **arg1** (*int*) -- Description of arg1 + * **arg2** (*str*) -- Description of arg2 +"""), + ("""\ +Returns Style: + description of custom section + +""", """:Returns Style: description of custom section +""")) + + testConfig = Config(napoleon_custom_sections=['Really Important Details', + ('Sooper Warning', 'warns'), + ('Params Style', 'params_style'), + ('Returns Style', 'returns_style')]) + + for docstring, expected in docstrings: + actual = str(GoogleDocstring(docstring, testConfig)) + assert expected == actual + + def test_noindex(self): + docstring = """ +Attributes: + arg + description + +Methods: + func(i, j) + description +""" + + expected = """ +.. attribute:: arg + :no-index: + + description + +.. method:: func(i, j) + :no-index: + + + description +""" # noqa: W293 + config = Config() + actual = str(GoogleDocstring(docstring, config=config, app=None, what='module', + options={'no-index': True})) + assert expected == actual + + def test_keywords_with_types(self): + docstring = """\ +Do as you please + +Keyword Args: + gotham_is_yours (None): shall interfere. +""" + actual = str(GoogleDocstring(docstring)) + expected = """\ +Do as you please + +:keyword gotham_is_yours: shall interfere. +:kwtype gotham_is_yours: None +""" + assert expected == actual + + def test_pep526_annotations(self): + # Test class attributes annotations + config = Config( + napoleon_attr_annotations=True, + ) + actual = str(GoogleDocstring(cleandoc(PEP526GoogleClass.__doc__), config, app=None, what="class", + obj=PEP526GoogleClass)) + expected = """\ +Sample class with PEP 526 annotations and google docstring + +.. attribute:: attr1 + + Attr1 description. + + :type: int + +.. attribute:: attr2 + + Attr2 description. 
+ + :type: str +""" + assert expected == actual + + def test_preprocess_types(self): + docstring = """\ +Do as you please + +Yield: + str:Extended +""" + actual = str(GoogleDocstring(docstring)) + expected = """\ +Do as you please + +:Yields: *str* -- Extended +""" + assert expected == actual + + config = Config(napoleon_preprocess_types=True) + actual = str(GoogleDocstring(docstring, config)) + expected = """\ +Do as you please + +:Yields: :py:class:`str` -- Extended +""" + assert expected == actual + + +class TestNumpyDocstring: + docstrings = [( + """Single line summary""", + """Single line summary""", + ), ( + """ + Single line summary + + Extended description + + """, + """ + Single line summary + + Extended description + """, + ), ( + """ + Single line summary + + Parameters + ---------- + arg1:str + Extended + description of arg1 + """, + """ + Single line summary + + :Parameters: **arg1** (:class:`str`) -- Extended + description of arg1 + """, + ), ( + """ + Single line summary + + Parameters + ---------- + arg1:str + Extended + description of arg1 + arg2 : int + Extended + description of arg2 + + Keyword Arguments + ----------------- + kwarg1:str + Extended + description of kwarg1 + kwarg2 : int + Extended + description of kwarg2 + """, + """ + Single line summary + + :Parameters: * **arg1** (:class:`str`) -- Extended + description of arg1 + * **arg2** (:class:`int`) -- Extended + description of arg2 + + :Keyword Arguments: * **kwarg1** (:class:`str`) -- Extended + description of kwarg1 + * **kwarg2** (:class:`int`) -- Extended + description of kwarg2 + """, + ), ( + """ + Single line summary + + Return + ------ + str + Extended + description of return value + """, + """ + Single line summary + + :returns: :class:`str` -- Extended + description of return value + """, + ), ( + """ + Single line summary + + Returns + ------- + str + Extended + description of return value + """, + """ + Single line summary + + :returns: :class:`str` -- Extended + description of return value + """, + ), ( + """ + Single line summary + + Parameters + ---------- + arg1:str + Extended description of arg1 + *args: + Variable length argument list. + **kwargs: + Arbitrary keyword arguments. + """, + """ + Single line summary + + :Parameters: * **arg1** (:class:`str`) -- Extended description of arg1 + * **\\*args** -- Variable length argument list. + * **\\*\\*kwargs** -- Arbitrary keyword arguments. + """, + ), ( + """ + Single line summary + + Parameters + ---------- + arg1:str + Extended description of arg1 + *args, **kwargs: + Variable length argument list and arbitrary keyword arguments. + """, + """ + Single line summary + + :Parameters: * **arg1** (:class:`str`) -- Extended description of arg1 + * **\\*args, \\*\\*kwargs** -- Variable length argument list and arbitrary keyword arguments. 
+ """, + ), ( + """ + Single line summary + + Receive + ------- + arg1:str + Extended + description of arg1 + arg2 : int + Extended + description of arg2 + """, + """ + Single line summary + + :Receives: * **arg1** (:class:`str`) -- Extended + description of arg1 + * **arg2** (:class:`int`) -- Extended + description of arg2 + """, + ), ( + """ + Single line summary + + Receives + -------- + arg1:str + Extended + description of arg1 + arg2 : int + Extended + description of arg2 + """, + """ + Single line summary + + :Receives: * **arg1** (:class:`str`) -- Extended + description of arg1 + * **arg2** (:class:`int`) -- Extended + description of arg2 + """, + ), ( + """ + Single line summary + + Yield + ----- + str + Extended + description of yielded value + """, + """ + Single line summary + + :Yields: :class:`str` -- Extended + description of yielded value + """, + ), ( + """ + Single line summary + + Yields + ------ + str + Extended + description of yielded value + """, + """ + Single line summary + + :Yields: :class:`str` -- Extended + description of yielded value + """, + )] + + def test_sphinx_admonitions(self): + admonition_map = { + 'Attention': 'attention', + 'Caution': 'caution', + 'Danger': 'danger', + 'Error': 'error', + 'Hint': 'hint', + 'Important': 'important', + 'Note': 'note', + 'Tip': 'tip', + 'Todo': 'todo', + 'Warning': 'warning', + 'Warnings': 'warning', + } + config = Config() + for section, admonition in admonition_map.items(): + # Multiline + actual = str(NumpyDocstring(f"{section}\n" + f"{'-' * len(section)}\n" + " this is the first line\n" + "\n" + " and this is the second line\n", + config)) + expect = (f".. {admonition}::\n" + "\n" + " this is the first line\n" + " \n" + " and this is the second line\n" + ) + assert expect == actual + + # Single line + actual = str(NumpyDocstring(f"{section}\n" + f"{'-' * len(section)}\n" + f" this is a single line\n", + config)) + expect = f".. 
{admonition}:: this is a single line\n" + assert expect == actual + + def test_docstrings(self): + config = Config( + napoleon_use_param=False, + napoleon_use_rtype=False, + napoleon_use_keyword=False, + napoleon_preprocess_types=True) + for docstring, expected in self.docstrings: + actual = str(NumpyDocstring(dedent(docstring), config)) + expected = dedent(expected) + assert expected == actual + + def test_type_preprocessor(self): + docstring = dedent(""" + Single line summary + + Parameters + ---------- + arg1:str + Extended + description of arg1 + """) + + config = Config(napoleon_preprocess_types=False, napoleon_use_param=False) + actual = str(NumpyDocstring(docstring, config)) + expected = dedent(""" + Single line summary + + :Parameters: **arg1** (*str*) -- Extended + description of arg1 + """) + assert expected == actual + + def test_parameters_with_class_reference(self): + docstring = """\ +Parameters +---------- +param1 : :class:`MyClass <name.space.MyClass>` instance + +Other Parameters +---------------- +param2 : :class:`MyClass <name.space.MyClass>` instance + +""" + + config = Config(napoleon_use_param=False) + actual = str(NumpyDocstring(docstring, config)) + expected = """\ +:Parameters: **param1** (:class:`MyClass <name.space.MyClass>` instance) + +:Other Parameters: **param2** (:class:`MyClass <name.space.MyClass>` instance) +""" + assert expected == actual + + config = Config(napoleon_use_param=True) + actual = str(NumpyDocstring(docstring, config)) + expected = """\ +:param param1: +:type param1: :class:`MyClass <name.space.MyClass>` instance + +:param param2: +:type param2: :class:`MyClass <name.space.MyClass>` instance +""" + assert expected == actual + + def test_multiple_parameters(self): + docstring = """\ +Parameters +---------- +x1, x2 : array_like + Input arrays, description of ``x1``, ``x2``. + +""" + + config = Config(napoleon_use_param=False) + actual = str(NumpyDocstring(docstring, config)) + expected = """\ +:Parameters: **x1, x2** (*array_like*) -- Input arrays, description of ``x1``, ``x2``. +""" + assert expected == actual + + config = Config(napoleon_use_param=True) + actual = str(NumpyDocstring(dedent(docstring), config)) + expected = """\ +:param x1: Input arrays, description of ``x1``, ``x2``. +:type x1: array_like +:param x2: Input arrays, description of ``x1``, ``x2``. +:type x2: array_like +""" + assert expected == actual + + def test_parameters_without_class_reference(self): + docstring = """\ +Parameters +---------- +param1 : MyClass instance + +""" + + config = Config(napoleon_use_param=False) + actual = str(NumpyDocstring(docstring, config)) + expected = """\ +:Parameters: **param1** (*MyClass instance*) +""" + assert expected == actual + + config = Config(napoleon_use_param=True) + actual = str(NumpyDocstring(dedent(docstring), config)) + expected = """\ +:param param1: +:type param1: MyClass instance +""" + assert expected == actual + + def test_see_also_refs(self): + docstring = """\ +numpy.multivariate_normal(mean, cov, shape=None, spam=None) + +See Also +-------- +some, other, funcs +otherfunc : relationship + +""" + + actual = str(NumpyDocstring(docstring)) + + expected = """\ +numpy.multivariate_normal(mean, cov, shape=None, spam=None) + +.. 
seealso:: + + :obj:`some`, :obj:`other`, :obj:`funcs` + \n\ + :obj:`otherfunc` + relationship +""" + assert expected == actual + + docstring = """\ +numpy.multivariate_normal(mean, cov, shape=None, spam=None) + +See Also +-------- +some, other, funcs +otherfunc : relationship + +""" + + config = Config() + app = mock.Mock() + actual = str(NumpyDocstring(docstring, config, app, "method")) + + expected = """\ +numpy.multivariate_normal(mean, cov, shape=None, spam=None) + +.. seealso:: + + :obj:`some`, :obj:`other`, :obj:`funcs` + \n\ + :obj:`otherfunc` + relationship +""" + assert expected == actual + + docstring = """\ +numpy.multivariate_normal(mean, cov, shape=None, spam=None) + +See Also +-------- +some, other, :func:`funcs` +otherfunc : relationship + +""" + translations = { + "other": "MyClass.other", + "otherfunc": ":func:`~my_package.otherfunc`", + } + config = Config(napoleon_type_aliases=translations) + app = mock.Mock() + actual = str(NumpyDocstring(docstring, config, app, "method")) + + expected = """\ +numpy.multivariate_normal(mean, cov, shape=None, spam=None) + +.. seealso:: + + :obj:`some`, :obj:`MyClass.other`, :func:`funcs` + \n\ + :func:`~my_package.otherfunc` + relationship +""" + assert expected == actual + + def test_colon_in_return_type(self): + docstring = """ +Summary + +Returns +------- +:py:class:`~my_mod.my_class` + an instance of :py:class:`~my_mod.my_class` +""" + + expected = """ +Summary + +:returns: an instance of :py:class:`~my_mod.my_class` +:rtype: :py:class:`~my_mod.my_class` +""" + + config = Config() + app = mock.Mock() + actual = str(NumpyDocstring(docstring, config, app, "method")) + + assert expected == actual + + def test_underscore_in_attribute(self): + docstring = """ +Attributes +---------- + +arg_ : type + some description +""" + + expected = """ +:ivar arg_: some description +:vartype arg_: type +""" + + config = Config(napoleon_use_ivar=True) + app = mock.Mock() + actual = str(NumpyDocstring(docstring, config, app, "class")) + + assert expected == actual + + def test_underscore_in_attribute_strip_signature_backslash(self): + docstring = """ +Attributes +---------- + +arg_ : type + some description +""" + + expected = """ +:ivar arg\\_: some description +:vartype arg\\_: type +""" + + config = Config(napoleon_use_ivar=True) + config.strip_signature_backslash = True + app = mock.Mock() + actual = str(NumpyDocstring(docstring, config, app, "class")) + + assert expected == actual + + def test_return_types(self): + docstring = dedent(""" + Returns + ------- + DataFrame + a dataframe + """) + expected = dedent(""" + :returns: a dataframe + :rtype: :class:`~pandas.DataFrame` + """) + translations = { + "DataFrame": "~pandas.DataFrame", + } + config = Config( + napoleon_use_param=True, + napoleon_use_rtype=True, + napoleon_preprocess_types=True, + napoleon_type_aliases=translations, + ) + actual = str(NumpyDocstring(docstring, config)) + assert expected == actual + + def test_yield_types(self): + docstring = dedent(""" + Example Function + + Yields + ------ + scalar or array-like + The result of the computation + """) + expected = dedent(""" + Example Function + + :Yields: :term:`scalar` or :class:`array-like <numpy.ndarray>` -- The result of the computation + """) + translations = { + "scalar": ":term:`scalar`", + "array-like": ":class:`array-like <numpy.ndarray>`", + } + config = Config(napoleon_type_aliases=translations, napoleon_preprocess_types=True) + app = mock.Mock() + actual = str(NumpyDocstring(docstring, config, app, "method")) + assert 
expected == actual + + def test_raises_types(self): + docstrings = [(""" +Example Function + +Raises +------ + RuntimeError + + A setting wasn't specified, or was invalid. + ValueError + + Something something value error. + +""", """ +Example Function + +:raises RuntimeError: A setting wasn't specified, or was invalid. +:raises ValueError: Something something value error. +"""), + ################################ + (""" +Example Function + +Raises +------ +InvalidDimensionsError + +""", """ +Example Function + +:raises InvalidDimensionsError: +"""), + ################################ + (""" +Example Function + +Raises +------ +Invalid Dimensions Error + +""", """ +Example Function + +:raises Invalid Dimensions Error: +"""), + ################################ + (""" +Example Function + +Raises +------ +Invalid Dimensions Error + With description + +""", """ +Example Function + +:raises Invalid Dimensions Error: With description +"""), + ################################ + (""" +Example Function + +Raises +------ +InvalidDimensionsError + If the dimensions couldn't be parsed. + +""", """ +Example Function + +:raises InvalidDimensionsError: If the dimensions couldn't be parsed. +"""), + ################################ + (""" +Example Function + +Raises +------ +Invalid Dimensions Error + If the dimensions couldn't be parsed. + +""", """ +Example Function + +:raises Invalid Dimensions Error: If the dimensions couldn't be parsed. +"""), + ################################ + (""" +Example Function + +Raises +------ +If the dimensions couldn't be parsed. + +""", """ +Example Function + +:raises If the dimensions couldn't be parsed.: +"""), + ################################ + (""" +Example Function + +Raises +------ +:class:`exc.InvalidDimensionsError` + +""", """ +Example Function + +:raises exc.InvalidDimensionsError: +"""), + ################################ + (""" +Example Function + +Raises +------ +:class:`exc.InvalidDimensionsError` + If the dimensions couldn't be parsed. + +""", """ +Example Function + +:raises exc.InvalidDimensionsError: If the dimensions couldn't be parsed. +"""), + ################################ + (""" +Example Function + +Raises +------ +:class:`exc.InvalidDimensionsError` + If the dimensions couldn't be parsed, + then a :class:`exc.InvalidDimensionsError` will be raised. + +""", """ +Example Function + +:raises exc.InvalidDimensionsError: If the dimensions couldn't be parsed, + then a :class:`exc.InvalidDimensionsError` will be raised. +"""), + ################################ + (""" +Example Function + +Raises +------ +:class:`exc.InvalidDimensionsError` + If the dimensions couldn't be parsed. +:class:`exc.InvalidArgumentsError` + If the arguments are invalid. + +""", """ +Example Function + +:raises exc.InvalidDimensionsError: If the dimensions couldn't be parsed. +:raises exc.InvalidArgumentsError: If the arguments are invalid. +"""), + ################################ + (""" +Example Function + +Raises +------ +CustomError + If the dimensions couldn't be parsed. + +""", """ +Example Function + +:raises package.CustomError: If the dimensions couldn't be parsed. +"""), + ################################ + (""" +Example Function + +Raises +------ +AnotherError + If the dimensions couldn't be parsed. + +""", """ +Example Function + +:raises ~package.AnotherError: If the dimensions couldn't be parsed. 
+"""), + ################################ + (""" +Example Function + +Raises +------ +:class:`exc.InvalidDimensionsError` +:class:`exc.InvalidArgumentsError` + +""", """ +Example Function + +:raises exc.InvalidDimensionsError: +:raises exc.InvalidArgumentsError: +""")] + for docstring, expected in docstrings: + translations = { + "CustomError": "package.CustomError", + "AnotherError": ":py:exc:`~package.AnotherError`", + } + config = Config(napoleon_type_aliases=translations, napoleon_preprocess_types=True) + app = mock.Mock() + actual = str(NumpyDocstring(docstring, config, app, "method")) + assert expected == actual + + def test_xrefs_in_return_type(self): + docstring = """ +Example Function + +Returns +------- +:class:`numpy.ndarray` + A :math:`n \\times 2` array containing + a bunch of math items +""" + expected = """ +Example Function + +:returns: A :math:`n \\times 2` array containing + a bunch of math items +:rtype: :class:`numpy.ndarray` +""" + config = Config() + app = mock.Mock() + actual = str(NumpyDocstring(docstring, config, app, "method")) + assert expected == actual + + def test_section_header_underline_length(self): + docstrings = [(""" +Summary line + +Example +- +Multiline example +body + +""", """ +Summary line + +Example +- +Multiline example +body +"""), + ################################ + (""" +Summary line + +Example +-- +Multiline example +body + +""", """ +Summary line + +.. rubric:: Example + +Multiline example +body +"""), + ################################ + (""" +Summary line + +Example +------- +Multiline example +body + +""", """ +Summary line + +.. rubric:: Example + +Multiline example +body +"""), + ################################ + (""" +Summary line + +Example +------------ +Multiline example +body + +""", """ +Summary line + +.. rubric:: Example + +Multiline example +body +""")] + for docstring, expected in docstrings: + actual = str(NumpyDocstring(docstring)) + assert expected == actual + + def test_list_in_parameter_description(self): + docstring = """One line summary. + +Parameters +---------- +no_list : int +one_bullet_empty : int + * +one_bullet_single_line : int + - first line +one_bullet_two_lines : int + + first line + continued +two_bullets_single_line : int + - first line + - second line +two_bullets_two_lines : int + * first line + continued + * second line + continued +one_enumeration_single_line : int + 1. first line +one_enumeration_two_lines : int + 1) first line + continued +two_enumerations_one_line : int + (iii) first line + (iv) second line +two_enumerations_two_lines : int + a. first line + continued + b. second line + continued +one_definition_one_line : int + item 1 + first line +one_definition_two_lines : int + item 1 + first line + continued +two_definitions_one_line : int + item 1 + first line + item 2 + second line +two_definitions_two_lines : int + item 1 + first line + continued + item 2 + second line + continued +one_definition_blank_line : int + item 1 + + first line + + extra first line + +two_definitions_blank_lines : int + item 1 + + first line + + extra first line + + item 2 + + second line + + extra second line + +definition_after_normal_text : int + text line + + item 1 + first line +""" + + expected = """One line summary. 
+ +:param no_list: +:type no_list: int +:param one_bullet_empty: + * +:type one_bullet_empty: int +:param one_bullet_single_line: + - first line +:type one_bullet_single_line: int +:param one_bullet_two_lines: + + first line + continued +:type one_bullet_two_lines: int +:param two_bullets_single_line: + - first line + - second line +:type two_bullets_single_line: int +:param two_bullets_two_lines: + * first line + continued + * second line + continued +:type two_bullets_two_lines: int +:param one_enumeration_single_line: + 1. first line +:type one_enumeration_single_line: int +:param one_enumeration_two_lines: + 1) first line + continued +:type one_enumeration_two_lines: int +:param two_enumerations_one_line: + (iii) first line + (iv) second line +:type two_enumerations_one_line: int +:param two_enumerations_two_lines: + a. first line + continued + b. second line + continued +:type two_enumerations_two_lines: int +:param one_definition_one_line: + item 1 + first line +:type one_definition_one_line: int +:param one_definition_two_lines: + item 1 + first line + continued +:type one_definition_two_lines: int +:param two_definitions_one_line: + item 1 + first line + item 2 + second line +:type two_definitions_one_line: int +:param two_definitions_two_lines: + item 1 + first line + continued + item 2 + second line + continued +:type two_definitions_two_lines: int +:param one_definition_blank_line: + item 1 + + first line + + extra first line +:type one_definition_blank_line: int +:param two_definitions_blank_lines: + item 1 + + first line + + extra first line + + item 2 + + second line + + extra second line +:type two_definitions_blank_lines: int +:param definition_after_normal_text: text line + + item 1 + first line +:type definition_after_normal_text: int +""" + config = Config(napoleon_use_param=True) + actual = str(NumpyDocstring(docstring, config)) + assert expected == actual + + expected = """One line summary. + +:Parameters: * **no_list** (:class:`int`) + * **one_bullet_empty** (:class:`int`) -- + + * + * **one_bullet_single_line** (:class:`int`) -- + + - first line + * **one_bullet_two_lines** (:class:`int`) -- + + + first line + continued + * **two_bullets_single_line** (:class:`int`) -- + + - first line + - second line + * **two_bullets_two_lines** (:class:`int`) -- + + * first line + continued + * second line + continued + * **one_enumeration_single_line** (:class:`int`) -- + + 1. first line + * **one_enumeration_two_lines** (:class:`int`) -- + + 1) first line + continued + * **two_enumerations_one_line** (:class:`int`) -- + + (iii) first line + (iv) second line + * **two_enumerations_two_lines** (:class:`int`) -- + + a. first line + continued + b. 
second line + continued + * **one_definition_one_line** (:class:`int`) -- + + item 1 + first line + * **one_definition_two_lines** (:class:`int`) -- + + item 1 + first line + continued + * **two_definitions_one_line** (:class:`int`) -- + + item 1 + first line + item 2 + second line + * **two_definitions_two_lines** (:class:`int`) -- + + item 1 + first line + continued + item 2 + second line + continued + * **one_definition_blank_line** (:class:`int`) -- + + item 1 + + first line + + extra first line + * **two_definitions_blank_lines** (:class:`int`) -- + + item 1 + + first line + + extra first line + + item 2 + + second line + + extra second line + * **definition_after_normal_text** (:class:`int`) -- text line + + item 1 + first line +""" + config = Config(napoleon_use_param=False, napoleon_preprocess_types=True) + actual = str(NumpyDocstring(docstring, config)) + assert expected == actual + + def test_token_type(self): + tokens = ( + ("1", "literal"), + ("-4.6", "literal"), + ("2j", "literal"), + ("'string'", "literal"), + ('"another_string"', "literal"), + ("{1, 2}", "literal"), + ("{'va{ue', 'set'}", "literal"), + ("optional", "control"), + ("default", "control"), + (", ", "delimiter"), + (" of ", "delimiter"), + (" or ", "delimiter"), + (": ", "delimiter"), + ("True", "obj"), + ("None", "obj"), + ("name", "obj"), + (":py:class:`Enum`", "reference"), + ) + + for token, expected in tokens: + actual = _token_type(token) + assert expected == actual + + def test_tokenize_type_spec(self): + specs = ( + "str", + "defaultdict", + "int, float, or complex", + "int or float or None, optional", + "list of list of int or float, optional", + "tuple of list of str, float, or int", + '{"F", "C", "N"}', + "{'F', 'C', 'N'}, default: 'F'", + "{'F', 'C', 'N or C'}, default 'F'", + "str, default: 'F or C'", + "int, default: None", + "int, default None", + "int, default :obj:`None`", + '"ma{icious"', + r"'with \'quotes\''", + ) + + tokens = ( + ["str"], + ["defaultdict"], + ["int", ", ", "float", ", or ", "complex"], + ["int", " or ", "float", " or ", "None", ", ", "optional"], + ["list", " of ", "list", " of ", "int", " or ", "float", ", ", "optional"], + ["tuple", " of ", "list", " of ", "str", ", ", "float", ", or ", "int"], + ["{", '"F"', ", ", '"C"', ", ", '"N"', "}"], + ["{", "'F'", ", ", "'C'", ", ", "'N'", "}", ", ", "default", ": ", "'F'"], + ["{", "'F'", ", ", "'C'", ", ", "'N or C'", "}", ", ", "default", " ", "'F'"], + ["str", ", ", "default", ": ", "'F or C'"], + ["int", ", ", "default", ": ", "None"], + ["int", ", ", "default", " ", "None"], + ["int", ", ", "default", " ", ":obj:`None`"], + ['"ma{icious"'], + [r"'with \'quotes\''"], + ) + + for spec, expected in zip(specs, tokens): + actual = _tokenize_type_spec(spec) + assert expected == actual + + def test_recombine_set_tokens(self): + tokens = ( + ["{", "1", ", ", "2", "}"], + ["{", '"F"', ", ", '"C"', ", ", '"N"', "}", ", ", "optional"], + ["{", "'F'", ", ", "'C'", ", ", "'N'", "}", ", ", "default", ": ", "None"], + ["{", "'F'", ", ", "'C'", ", ", "'N'", "}", ", ", "default", " ", "None"], + ) + + combined_tokens = ( + ["{1, 2}"], + ['{"F", "C", "N"}', ", ", "optional"], + ["{'F', 'C', 'N'}", ", ", "default", ": ", "None"], + ["{'F', 'C', 'N'}", ", ", "default", " ", "None"], + ) + + for tokens_, expected in zip(tokens, combined_tokens): + actual = _recombine_set_tokens(tokens_) + assert expected == actual + + def test_recombine_set_tokens_invalid(self): + tokens = ( + ["{", "1", ", ", "2"], + ['"F"', ", ", '"C"', ", ", '"N"', "}", ", ", 
"optional"], + ["{", "1", ", ", "2", ", ", "default", ": ", "None"], + ) + combined_tokens = ( + ["{1, 2"], + ['"F"', ", ", '"C"', ", ", '"N"', "}", ", ", "optional"], + ["{1, 2", ", ", "default", ": ", "None"], + ) + + for tokens_, expected in zip(tokens, combined_tokens): + actual = _recombine_set_tokens(tokens_) + assert expected == actual + + def test_convert_numpy_type_spec(self): + translations = { + "DataFrame": "pandas.DataFrame", + } + + specs = ( + "", + "optional", + "str, optional", + "int or float or None, default: None", + "list of tuple of str, optional", + "int, default None", + '{"F", "C", "N"}', + "{'F', 'C', 'N'}, default: 'N'", + "{'F', 'C', 'N'}, default 'N'", + "DataFrame, optional", + ) + + converted = ( + "", + "*optional*", + ":class:`str`, *optional*", + ":class:`int` or :class:`float` or :obj:`None`, *default*: :obj:`None`", + ":class:`list` of :class:`tuple` of :class:`str`, *optional*", + ":class:`int`, *default* :obj:`None`", + '``{"F", "C", "N"}``', + "``{'F', 'C', 'N'}``, *default*: ``'N'``", + "``{'F', 'C', 'N'}``, *default* ``'N'``", + ":class:`pandas.DataFrame`, *optional*", + ) + + for spec, expected in zip(specs, converted): + actual = _convert_numpy_type_spec(spec, translations=translations) + assert expected == actual + + def test_parameter_types(self): + docstring = dedent("""\ + Parameters + ---------- + param1 : DataFrame + the data to work on + param2 : int or float or None, optional + a parameter with different types + param3 : dict-like, optional + a optional mapping + param4 : int or float or None, optional + a optional parameter with different types + param5 : {"F", "C", "N"}, optional + a optional parameter with fixed values + param6 : int, default None + different default format + param7 : mapping of hashable to str, optional + a optional mapping + param8 : ... or Ellipsis + ellipsis + param9 : tuple of list of int + a parameter with tuple of list of int + """) + expected = dedent("""\ + :param param1: the data to work on + :type param1: :class:`DataFrame` + :param param2: a parameter with different types + :type param2: :class:`int` or :class:`float` or :obj:`None`, *optional* + :param param3: a optional mapping + :type param3: :term:`dict-like <mapping>`, *optional* + :param param4: a optional parameter with different types + :type param4: :class:`int` or :class:`float` or :obj:`None`, *optional* + :param param5: a optional parameter with fixed values + :type param5: ``{"F", "C", "N"}``, *optional* + :param param6: different default format + :type param6: :class:`int`, *default* :obj:`None` + :param param7: a optional mapping + :type param7: :term:`mapping` of :term:`hashable` to :class:`str`, *optional* + :param param8: ellipsis + :type param8: :obj:`... 
<Ellipsis>` or :obj:`Ellipsis` + :param param9: a parameter with tuple of list of int + :type param9: :class:`tuple` of :class:`list` of :class:`int` + """) + translations = { + "dict-like": ":term:`dict-like <mapping>`", + "mapping": ":term:`mapping`", + "hashable": ":term:`hashable`", + } + config = Config( + napoleon_use_param=True, + napoleon_use_rtype=True, + napoleon_preprocess_types=True, + napoleon_type_aliases=translations, + ) + actual = str(NumpyDocstring(docstring, config)) + assert expected == actual + + def test_token_type_invalid(self, warning): + tokens = ( + "{1, 2", + "}", + "'abc", + "def'", + '"ghi', + 'jkl"', + ) + errors = ( + r".+: invalid value set \(missing closing brace\):", + r".+: invalid value set \(missing opening brace\):", + r".+: malformed string literal \(missing closing quote\):", + r".+: malformed string literal \(missing opening quote\):", + r".+: malformed string literal \(missing closing quote\):", + r".+: malformed string literal \(missing opening quote\):", + ) + for token, error in zip(tokens, errors): + try: + _token_type(token) + finally: + raw_warnings = warning.getvalue() + warnings = [w for w in raw_warnings.split("\n") if w.strip()] + + assert len(warnings) == 1 + assert re.compile(error).match(warnings[0]) + warning.truncate(0) + + @pytest.mark.parametrize( + ("name", "expected"), + [ + ("x, y, z", "x, y, z"), + ("*args, **kwargs", r"\*args, \*\*kwargs"), + ("*x, **y", r"\*x, \*\*y"), + ], + ) + def test_escape_args_and_kwargs(self, name, expected): + numpy_docstring = NumpyDocstring("") + actual = numpy_docstring._escape_args_and_kwargs(name) + + assert actual == expected + + def test_pep526_annotations(self): + # test class attributes annotations + config = Config( + napoleon_attr_annotations=True, + ) + actual = str(NumpyDocstring(cleandoc(PEP526NumpyClass.__doc__), config, app=None, what="class", + obj=PEP526NumpyClass)) + expected = """\ +Sample class with PEP 526 annotations and numpy docstring + +.. attribute:: attr1 + + Attr1 description + + :type: int + +.. 
attribute:: attr2 + + Attr2 description + + :type: str +""" + print(actual) + assert expected == actual + + +@pytest.mark.sphinx('text', testroot='ext-napoleon', + confoverrides={'autodoc_typehints': 'description', + 'autodoc_typehints_description_target': 'all'}) +def test_napoleon_and_autodoc_typehints_description_all(app, status, warning): + app.build() + content = (app.outdir / 'typehints.txt').read_text(encoding='utf-8') + assert content == ( + 'typehints\n' + '*********\n' + '\n' + 'mypackage.typehints.hello(x, *args, **kwargs)\n' + '\n' + ' Parameters:\n' + ' * **x** (*int*) -- X\n' + '\n' + ' * ***args** (*int*) -- Additional arguments.\n' + '\n' + ' * ****kwargs** (*int*) -- Extra arguments.\n' + '\n' + ' Return type:\n' + ' None\n' + ) + + +@pytest.mark.sphinx('text', testroot='ext-napoleon', + confoverrides={'autodoc_typehints': 'description', + 'autodoc_typehints_description_target': 'documented_params'}) +def test_napoleon_and_autodoc_typehints_description_documented_params(app, status, warning): + app.build() + content = (app.outdir / 'typehints.txt').read_text(encoding='utf-8') + assert content == ( + 'typehints\n' + '*********\n' + '\n' + 'mypackage.typehints.hello(x, *args, **kwargs)\n' + '\n' + ' Parameters:\n' + ' * **x** (*int*) -- X\n' + '\n' + ' * ***args** (*int*) -- Additional arguments.\n' + '\n' + ' * ****kwargs** (*int*) -- Extra arguments.\n' + ) diff --git a/tests/test_ext_todo.py b/tests/test_ext_todo.py new file mode 100644 index 0000000..7d39495 --- /dev/null +++ b/tests/test_ext_todo.py @@ -0,0 +1,109 @@ +"""Test sphinx.ext.todo extension.""" + +import re + +import pytest + + +@pytest.mark.sphinx('html', testroot='ext-todo', freshenv=True, + confoverrides={'todo_include_todos': True, 'todo_emit_warnings': True}) +def test_todo(app, status, warning): + todos = [] + + def on_todo_defined(app, node): + todos.append(node) + + app.connect('todo-defined', on_todo_defined) + app.builder.build_all() + + # check todolist + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<p class="admonition-title">Todo</p>\n' + '<p>todo in foo</p>') in content + + assert ('<p class="admonition-title">Todo</p>\n' + '<p>todo in bar</p>') in content + + # check todo + content = (app.outdir / 'foo.html').read_text(encoding='utf8') + assert ('<p class="admonition-title">Todo</p>\n' + '<p>todo in foo</p>') in content + + assert ('<p class="admonition-title">Todo</p>\n' + '<p>todo in param field</p>') in content + + # check emitted warnings + assert 'WARNING: TODO entry found: todo in foo' in warning.getvalue() + assert 'WARNING: TODO entry found: todo in bar' in warning.getvalue() + + # check handled event + assert len(todos) == 3 + assert {todo[1].astext() for todo in todos} == {'todo in foo', + 'todo in bar', + 'todo in param field'} + + +@pytest.mark.sphinx('html', testroot='ext-todo', freshenv=True, + confoverrides={'todo_include_todos': False, 'todo_emit_warnings': True}) +def test_todo_not_included(app, status, warning): + todos = [] + + def on_todo_defined(app, node): + todos.append(node) + + app.connect('todo-defined', on_todo_defined) + app.builder.build_all() + + # check todolist + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<p class="admonition-title">Todo</p>\n' + '<p>todo in foo</p>') not in content + + assert ('<p class="admonition-title">Todo</p>\n' + '<p>todo in bar</p>') not in content + + # check todo + content = (app.outdir / 'foo.html').read_text(encoding='utf8') + assert ('<p 
class="admonition-title">Todo</p>\n' + '<p>todo in foo</p>') not in content + + # check emitted warnings + assert 'WARNING: TODO entry found: todo in foo' in warning.getvalue() + assert 'WARNING: TODO entry found: todo in bar' in warning.getvalue() + + # check handled event + assert len(todos) == 3 + assert {todo[1].astext() for todo in todos} == {'todo in foo', + 'todo in bar', + 'todo in param field'} + + +@pytest.mark.sphinx('latex', testroot='ext-todo', freshenv=True, + confoverrides={'todo_include_todos': True}) +def test_todo_valid_link(app, status, warning): + """ + Test that the inserted "original entry" links for todo items have a target + that exists in the LaTeX output. The target was previously incorrectly + omitted (GitHub issue #1020). + """ + + # Ensure the LaTeX output is built. + app.builder.build_all() + + content = (app.outdir / 'python.tex').read_text(encoding='utf8') + + # Look for the link to foo. Note that there are two of them because the + # source document uses todolist twice. We could equally well look for links + # to bar. + link = (r'{\\hyperref\[\\detokenize{(.*?foo.*?)}]{\\sphinxcrossref{' + r'\\sphinxstyleemphasis{original entry}}}}') + m = re.findall(link, content) + assert len(m) == 4 + target = m[0] + + # Look for the targets of this link. + labels = re.findall(r'\\label{\\detokenize{([^}]*)}}', content) + matched = [l for l in labels if l == target] + + # If everything is correct we should have exactly one target. + assert len(matched) == 1 diff --git a/tests/test_ext_viewcode.py b/tests/test_ext_viewcode.py new file mode 100644 index 0000000..a1a0a6d --- /dev/null +++ b/tests/test_ext_viewcode.py @@ -0,0 +1,137 @@ +"""Test sphinx.ext.viewcode extension.""" + +import re +import shutil + +import pytest + + +def check_viewcode_output(app, warning): + warnings = re.sub(r'\\+', '/', warning.getvalue()) + assert re.findall( + r"index.rst:\d+: WARNING: Object named 'func1' not found in include " + + r"file .*/spam/__init__.py'", + warnings, + ) + + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert result.count('href="_modules/spam/mod1.html#func1"') == 2 + assert result.count('href="_modules/spam/mod2.html#func2"') == 2 + assert result.count('href="_modules/spam/mod1.html#Class1"') == 2 + assert result.count('href="_modules/spam/mod2.html#Class2"') == 2 + assert result.count('@decorator') == 1 + + # test that the class attribute is correctly documented + assert result.count('this is Class3') == 2 + assert 'this is the class attribute class_attr' in result + # the next assert fails, until the autodoc bug gets fixed + assert result.count('this is the class attribute class_attr') == 2 + + result = (app.outdir / '_modules/spam/mod1.html').read_text(encoding='utf8') + result = re.sub('<span class="[^"]{,2}">', '<span>', result) # filter pygments classes + assert ('<div class="viewcode-block" id="Class1">\n' + '<a class="viewcode-back" href="../../index.html#spam.Class1">[docs]</a>\n') in result + assert '<span>@decorator</span>\n' in result + assert '<span>class</span> <span>Class1</span><span>:</span>\n' in result + assert '<span> </span><span>"""</span>\n' in result + assert '<span> this is Class1</span>\n' in result + assert '<span> """</span>\n' in result + + return result + + +@pytest.mark.sphinx(testroot='ext-viewcode', freshenv=True, + confoverrides={"viewcode_line_numbers": True}) +def test_viewcode_linenos(app, warning): + shutil.rmtree(app.outdir / '_modules', ignore_errors=True) + app.builder.build_all() + + result = 
check_viewcode_output(app, warning) + assert '<span class="linenos"> 1</span>' in result + + +@pytest.mark.sphinx(testroot='ext-viewcode', freshenv=True, + confoverrides={"viewcode_line_numbers": False}) +def test_viewcode(app, warning): + shutil.rmtree(app.outdir / '_modules', ignore_errors=True) + app.builder.build_all() + + result = check_viewcode_output(app, warning) + assert 'class="linenos">' not in result + + +@pytest.mark.sphinx('epub', testroot='ext-viewcode') +def test_viewcode_epub_default(app, status, warning): + shutil.rmtree(app.outdir) + app.builder.build_all() + + assert not (app.outdir / '_modules/spam/mod1.xhtml').exists() + + result = (app.outdir / 'index.xhtml').read_text(encoding='utf8') + assert result.count('href="_modules/spam/mod1.xhtml#func1"') == 0 + + +@pytest.mark.sphinx('epub', testroot='ext-viewcode', + confoverrides={'viewcode_enable_epub': True}) +def test_viewcode_epub_enabled(app, status, warning): + app.builder.build_all() + + assert (app.outdir / '_modules/spam/mod1.xhtml').exists() + + result = (app.outdir / 'index.xhtml').read_text(encoding='utf8') + assert result.count('href="_modules/spam/mod1.xhtml#func1"') == 2 + + +@pytest.mark.sphinx(testroot='ext-viewcode', tags=['test_linkcode']) +def test_linkcode(app, status, warning): + app.builder.build(['objects']) + + stuff = (app.outdir / 'objects.html').read_text(encoding='utf8') + + assert 'http://foobar/source/foolib.py' in stuff + assert 'http://foobar/js/' in stuff + assert 'http://foobar/c/' in stuff + assert 'http://foobar/cpp/' in stuff + + +@pytest.mark.sphinx(testroot='ext-viewcode-find', freshenv=True) +def test_local_source_files(app, status, warning): + def find_source(app, modname): + if modname == 'not_a_package': + source = (app.srcdir / 'not_a_package/__init__.py').read_text(encoding='utf8') + tags = { + 'func1': ('def', 1, 1), + 'Class1': ('class', 1, 1), + 'not_a_package.submodule.func1': ('def', 1, 1), + 'not_a_package.submodule.Class1': ('class', 1, 1), + } + else: + source = (app.srcdir / 'not_a_package/submodule.py').read_text(encoding='utf8') + tags = { + 'not_a_package.submodule.func1': ('def', 11, 15), + 'Class1': ('class', 19, 22), + 'not_a_package.submodule.Class1': ('class', 19, 22), + 'Class3': ('class', 25, 30), + 'not_a_package.submodule.Class3.class_attr': ('other', 29, 29), + } + return (source, tags) + + app.connect('viewcode-find-source', find_source) + app.builder.build_all() + + warnings = re.sub(r'\\+', '/', warning.getvalue()) + assert re.findall( + r"index.rst:\d+: WARNING: Object named 'func1' not found in include " + + r"file .*/not_a_package/__init__.py'", + warnings, + ) + + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert result.count('href="_modules/not_a_package.html#func1"') == 1 + assert result.count('href="_modules/not_a_package.html#not_a_package.submodule.func1"') == 1 + assert result.count('href="_modules/not_a_package/submodule.html#Class1"') == 1 + assert result.count('href="_modules/not_a_package/submodule.html#Class3"') == 1 + assert result.count('href="_modules/not_a_package/submodule.html#not_a_package.submodule.Class1"') == 1 + + assert result.count('href="_modules/not_a_package/submodule.html#not_a_package.submodule.Class3.class_attr"') == 1 + assert result.count('This is the class attribute class_attr') == 1 diff --git a/tests/test_extension.py b/tests/test_extension.py new file mode 100644 index 0000000..d74743c --- /dev/null +++ b/tests/test_extension.py @@ -0,0 +1,23 @@ +"""Test sphinx.extension module.""" + 
+import pytest + +from sphinx.errors import VersionRequirementError +from sphinx.extension import Extension, verify_needs_extensions + + +def test_needs_extensions(app): + # empty needs_extensions + assert app.config.needs_extensions == {} + verify_needs_extensions(app, app.config) + + # needs_extensions fulfilled + app.config.needs_extensions = {'test.extension': '3.9'} + app.extensions['test.extension'] = Extension('test.extension', 'test.extension', version='3.10') + verify_needs_extensions(app, app.config) + + # needs_extensions not fulfilled + app.config.needs_extensions = {'test.extension': '3.11'} + app.extensions['test.extension'] = Extension('test.extension', 'test.extension', version='3.10') + with pytest.raises(VersionRequirementError): + verify_needs_extensions(app, app.config) diff --git a/tests/test_highlighting.py b/tests/test_highlighting.py new file mode 100644 index 0000000..a33ebb3 --- /dev/null +++ b/tests/test_highlighting.py @@ -0,0 +1,104 @@ +"""Test the Pygments highlighting bridge.""" + +from unittest import mock + +from pygments.formatters.html import HtmlFormatter +from pygments.lexer import RegexLexer +from pygments.token import Name, Text + +from sphinx.highlighting import PygmentsBridge + + +class MyLexer(RegexLexer): + name = 'testlexer' + + tokens = { + 'root': [ + ('a', Name), + ('b', Text), + ], + } + + +class MyFormatter(HtmlFormatter): + def format(self, tokensource, outfile): + for tok in tokensource: + outfile.write(tok[1]) + + +class ComplainOnUnhighlighted(PygmentsBridge): + def unhighlighted(self, source): + raise AssertionError("should highlight %r" % source) + + +def test_add_lexer(app, status, warning): + app.add_lexer('test', MyLexer) + + bridge = PygmentsBridge('html') + ret = bridge.highlight_block('ab', 'test') + assert '<span class="n">a</span>b' in ret + + +def test_detect_interactive(): + bridge = ComplainOnUnhighlighted('html') + blocks = [ + """ + >>> testing() + True + """, + ] + for block in blocks: + ret = bridge.highlight_block(block.lstrip(), 'python') + assert ret.startswith("<div class=\"highlight\">") + + +def test_lexer_options(): + bridge = PygmentsBridge('html') + ret = bridge.highlight_block('//comment', 'php', opts={'startinline': True}) + assert '<span class="c1">//comment</span>' in ret + + +def test_set_formatter(): + PygmentsBridge.html_formatter = MyFormatter + try: + bridge = PygmentsBridge('html') + ret = bridge.highlight_block('foo\n', 'python') + assert ret == 'foo\n' + finally: + PygmentsBridge.html_formatter = HtmlFormatter + + +@mock.patch('sphinx.highlighting.logger') +def test_default_highlight(logger): + bridge = PygmentsBridge('html') + + # default: highlights as python3 + ret = bridge.highlight_block('print "Hello sphinx world"', 'default') + assert ret == ('<div class="highlight"><pre><span></span><span class="nb">print</span> ' + '<span class="s2">"Hello sphinx world"</span>\n</pre></div>\n') + + # default: fallbacks to none if highlighting failed + ret = bridge.highlight_block('reST ``like`` text', 'default') + assert ret == '<div class="highlight"><pre><span></span>reST ``like`` text\n</pre></div>\n' + + # python: highlights as python3 + ret = bridge.highlight_block('print("Hello sphinx world")', 'python') + assert ret == ('<div class="highlight"><pre><span></span><span class="nb">print</span>' + '<span class="p">(</span>' + '<span class="s2">"Hello sphinx world"</span>' + '<span class="p">)</span>\n</pre></div>\n') + + # python3: highlights as python3 + ret = bridge.highlight_block('print("Hello sphinx 
world")', 'python3') + assert ret == ('<div class="highlight"><pre><span></span><span class="nb">print</span>' + '<span class="p">(</span>' + '<span class="s2">"Hello sphinx world"</span>' + '<span class="p">)</span>\n</pre></div>\n') + + # python: raises error if highlighting failed + ret = bridge.highlight_block('reST ``like`` text', 'python') + logger.warning.assert_called_with('Lexing literal_block %r as "%s" resulted in an error at token: %r. ' + 'Retrying in relaxed mode.', + 'reST ``like`` text', 'python', '`', + type='misc', subtype='highlighting_failure', + location=None) diff --git a/tests/test_intl.py b/tests/test_intl.py new file mode 100644 index 0000000..a07ebfb --- /dev/null +++ b/tests/test_intl.py @@ -0,0 +1,1527 @@ +"""Test message patching for internationalization purposes. + +Runs the text builder in the test root. +""" + +import os +import os.path +import re +import shutil +import time +from pathlib import Path + +import pytest +from babel.messages import mofile, pofile +from babel.messages.catalog import Catalog +from docutils import nodes + +from sphinx import locale +from sphinx.testing.util import assert_node, etree_parse, strip_escseq +from sphinx.util.nodes import NodeMatcher + +sphinx_intl = pytest.mark.sphinx( + testroot='intl', + confoverrides={ + 'language': 'xx', 'locale_dirs': ['.'], + 'gettext_compact': False, + }, +) + + +def read_po(pathname): + with open(pathname, encoding='utf-8') as f: + return pofile.read_po(f) + + +def write_mo(pathname, po): + with open(pathname, 'wb') as f: + return mofile.write_mo(f, po) + + +@pytest.fixture(autouse=True) +def _setup_intl(app_params): + assert isinstance(app_params.kwargs['srcdir'], Path) + srcdir = app_params.kwargs['srcdir'] + for dirpath, _dirs, files in os.walk(srcdir): + dirpath = Path(dirpath) + for f in [f for f in files if f.endswith('.po')]: + po = str(dirpath / f) + mo = srcdir / 'xx' / 'LC_MESSAGES' / ( + os.path.relpath(po[:-3], srcdir) + '.mo') + if not mo.parent.exists(): + mo.parent.mkdir(parents=True, exist_ok=True) + + if not mo.exists() or os.stat(mo).st_mtime < os.stat(po).st_mtime: + # compile .mo file only if needed + write_mo(mo, read_po(po)) + + +@pytest.fixture(autouse=True) +def _info(app): + yield + print('# language:', app.config.language) + print('# locale_dirs:', app.config.locale_dirs) + + +def elem_gettexts(elem): + return [_f for _f in [s.strip() for s in elem.itertext()] if _f] + + +def elem_getref(elem): + return elem.attrib.get('refid') or elem.attrib.get('refuri') + + +def assert_elem(elem, texts=None, refs=None, names=None): + if texts is not None: + _texts = elem_gettexts(elem) + assert _texts == texts + if refs is not None: + _refs = [elem_getref(x) for x in elem.findall('reference')] + assert _refs == refs + if names is not None: + _names = elem.attrib.get('names').split() + assert _names == names + + +def assert_count(expected_expr, result, count): + find_pair = (expected_expr, result) + assert len(re.findall(*find_pair)) == count, find_pair + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_emit_warnings(app, warning): + app.build() + # test warnings in translation + warnings = getwarning(warning) + warning_expr = ('.*/warnings.txt:4:<translated>:1: ' + 'WARNING: Inline literal start-string without end-string.\n') + assert re.search(warning_expr, warnings), f'{warning_expr!r} did not match {warnings!r}' + + +@sphinx_intl +@pytest.mark.sphinx('text') 
+@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_warning_node(app): + app.build() + # test warnings in translation + result = (app.outdir / 'warnings.txt').read_text(encoding='utf8') + expect = ("3. I18N WITH REST WARNINGS" + "\n**************************\n" + "\nLINE OF >>``<<BROKEN LITERAL MARKUP.\n") + assert result == expect + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_title_underline(app): + app.build() + # --- simple translation; check title underlines + result = (app.outdir / 'bom.txt').read_text(encoding='utf8') + expect = ("2. Datei mit UTF-8" + "\n******************\n" # underline matches new translation + "\nThis file has umlauts: äöü.\n") + assert result == expect + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_subdirs(app): + app.build() + # --- check translation in subdirs + result = (app.outdir / 'subdir' / 'index.txt').read_text(encoding='utf8') + assert result.startswith('1. subdir contents\n******************\n') + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_inconsistency_warnings(app, warning): + app.build() + # --- check warnings for inconsistency in number of references + result = (app.outdir / 'refs_inconsistency.txt').read_text(encoding='utf8') + expect = ("8. I18N WITH REFS INCONSISTENCY" + "\n*******************************\n" + "\n* FOR CITATION [ref3].\n" + "\n* reference FOR reference.\n" + "\n* ORPHAN REFERENCE: I18N WITH REFS INCONSISTENCY.\n" + "\n[1] THIS IS A AUTO NUMBERED FOOTNOTE.\n" + "\n[ref2] THIS IS A CITATION.\n" + "\n[100] THIS IS A NUMBERED FOOTNOTE.\n") + assert result == expect + + warnings = getwarning(warning) + warning_fmt = ('.*/refs_inconsistency.txt:\\d+: ' + 'WARNING: inconsistent %(reftype)s in translated message.' + ' original: %(original)s, translated: %(translated)s\n') + expected_warning_expr = ( + warning_fmt % { + 'reftype': 'footnote references', + 'original': "\\['\\[#\\]_'\\]", + 'translated': "\\[\\]", + } + + warning_fmt % { + 'reftype': 'footnote references', + 'original': "\\['\\[100\\]_'\\]", + 'translated': "\\[\\]", + } + + warning_fmt % { + 'reftype': 'references', + 'original': "\\['reference_'\\]", + 'translated': "\\['reference_', 'reference_'\\]", + } + + warning_fmt % { + 'reftype': 'references', + 'original': "\\[\\]", + 'translated': "\\['`I18N WITH REFS INCONSISTENCY`_'\\]", + }) + assert re.search(expected_warning_expr, warnings), f'{expected_warning_expr!r} did not match {warnings!r}' + + expected_citation_warning_expr = ( + '.*/refs_inconsistency.txt:\\d+: WARNING: Citation \\[ref2\\] is not referenced.\n' + + '.*/refs_inconsistency.txt:\\d+: WARNING: citation not found: ref3') + assert re.search(expected_citation_warning_expr, warnings), f'{expected_citation_warning_expr!r} did not match {warnings!r}' + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_noqa(app, warning): + app.build() + result = (app.outdir / 'noqa.txt').read_text(encoding='utf8') + expect = r"""FIRST SECTION +************* + +TRANSLATED TEXT WITHOUT REFERENCE. + +TEST noqa WHITESPACE INSENSITIVITY. + +"#noqa" IS ESCAPED AT THE END OF THIS STRING. 
#noqa + + +NEXT SECTION WITH PARAGRAPH TO TEST BARE noqa +********************************************* + +Some text, again referring to the section: NEXT SECTION WITH PARAGRAPH +TO TEST BARE noqa. +""" + assert result == expect + assert "next-section" not in getwarning(warning) + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_literalblock_warnings(app, warning): + app.build() + # --- check warning for literal block + result = (app.outdir / 'literalblock.txt').read_text(encoding='utf8') + expect = ("9. I18N WITH LITERAL BLOCK" + "\n**************************\n" + "\nCORRECT LITERAL BLOCK:\n" + "\n this is" + "\n literal block\n" + "\nMISSING LITERAL BLOCK:\n" + "\n<SYSTEM MESSAGE:") + assert result.startswith(expect) + + warnings = getwarning(warning) + expected_warning_expr = ('.*/literalblock.txt:\\d+: ' + 'WARNING: Literal block expected; none found.') + assert re.search(expected_warning_expr, warnings), f'{expected_warning_expr!r} did not match {warnings!r}' + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_definition_terms(app): + app.build() + # --- definition terms: regression test for #975, #2198, #2205 + result = (app.outdir / 'definition_terms.txt').read_text(encoding='utf8') + expect = ("13. I18N WITH DEFINITION TERMS" + "\n******************************\n" + "\nSOME TERM" + "\n THE CORRESPONDING DEFINITION\n" + "\nSOME *TERM* WITH LINK" + "\n THE CORRESPONDING DEFINITION #2\n" + "\nSOME **TERM** WITH : CLASSIFIER1 : CLASSIFIER2" + "\n THE CORRESPONDING DEFINITION\n" + "\nSOME TERM WITH : CLASSIFIER[]" + "\n THE CORRESPONDING DEFINITION\n") + assert result == expect + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_glossary_term(app, warning): + app.build() + # --- glossary terms: regression test for #1090 + result = (app.outdir / 'glossary_terms.txt').read_text(encoding='utf8') + expect = (r"""18. I18N WITH GLOSSARY TERMS +**************************** + +SOME NEW TERM + THE CORRESPONDING GLOSSARY + +SOME OTHER NEW TERM + THE CORRESPONDING GLOSSARY #2 + +LINK TO *SOME NEW TERM*. + +TRANSLATED GLOSSARY SHOULD BE SORTED BY TRANSLATED TERMS: + +TRANSLATED TERM XXX + DEFINE XXX + +TRANSLATED TERM YYY + DEFINE YYY + +TRANSLATED TERM ZZZ +VVV + DEFINE ZZZ +""") + assert result == expect + warnings = getwarning(warning) + assert 'term not in glossary' not in warnings + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_glossary_term_inconsistencies(app, warning): + app.build() + # --- glossary term inconsistencies: regression test for #1090 + result = (app.outdir / 'glossary_terms_inconsistency.txt').read_text(encoding='utf8') + expect = ("19. I18N WITH GLOSSARY TERMS INCONSISTENCY" + "\n******************************************\n" + "\n1. LINK TO *SOME NEW TERM*.\n") + assert result == expect + + warnings = getwarning(warning) + expected_warning_expr = ( + '.*/glossary_terms_inconsistency.txt:\\d+: ' + 'WARNING: inconsistent term references in translated message.' 
+ " original: \\[':term:`Some term`', ':term:`Some other term`'\\]," + " translated: \\[':term:`SOME NEW TERM`'\\]\n") + assert re.search(expected_warning_expr, warnings), f'{expected_warning_expr!r} did not match {warnings!r}' + + +@sphinx_intl +@pytest.mark.sphinx('gettext') +@pytest.mark.test_params(shared_result='test_intl_gettext') +def test_gettext_section(app): + app.build() + # --- section + expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'section.po') + actual = read_po(app.outdir / 'section.pot') + for expect_msg in [m for m in expect if m.id]: + assert expect_msg.id in [m.id for m in actual if m.id] + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_section(app): + app.build() + # --- section + result = (app.outdir / 'section.txt').read_text(encoding='utf8') + expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'section.po') + for expect_msg in [m for m in expect if m.id]: + assert expect_msg.string in result + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_seealso(app): + app.build() + # --- seealso + result = (app.outdir / 'seealso.txt').read_text(encoding='utf8') + expect = ("12. I18N WITH SEEALSO" + "\n*********************\n" + "\nSee also: SHORT TEXT 1\n" + "\nSee also: LONG TEXT 1\n" + "\nSee also:\n" + "\n SHORT TEXT 2\n" + "\n LONG TEXT 2\n") + assert result == expect + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_figure_captions(app): + app.build() + # --- figure captions: regression test for #940 + result = (app.outdir / 'figure.txt').read_text(encoding='utf8') + expect = ("14. I18N WITH FIGURE CAPTION" + "\n****************************\n" + "\n [image]MY CAPTION OF THE FIGURE\n" + "\n MY DESCRIPTION PARAGRAPH1 OF THE FIGURE.\n" + "\n MY DESCRIPTION PARAGRAPH2 OF THE FIGURE.\n" + "\n" + "\n14.1. FIGURE IN THE BLOCK" + "\n=========================\n" + "\nBLOCK\n" + "\n [image]MY CAPTION OF THE FIGURE\n" + "\n MY DESCRIPTION PARAGRAPH1 OF THE FIGURE.\n" + "\n MY DESCRIPTION PARAGRAPH2 OF THE FIGURE.\n" + "\n" + "\n" + "14.2. IMAGE URL AND ALT\n" + "=======================\n" + "\n" + "[image: I18N -> IMG][image]\n" + "\n" + " [image: IMG -> I18N][image]\n" + "\n" + "\n" + "14.3. IMAGE ON SUBSTITUTION\n" + "===========================\n" + "\n" + "\n" + "14.4. IMAGE UNDER NOTE\n" + "======================\n" + "\n" + "Note:\n" + "\n" + " [image: i18n under note][image]\n" + "\n" + " [image: img under note][image]\n") + assert result == expect + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_rubric(app): + app.build() + # --- rubric: regression test for pull request #190 + result = (app.outdir / 'rubric.txt').read_text(encoding='utf8') + expect = ("I18N WITH RUBRIC" + "\n****************\n" + "\n-[ RUBRIC TITLE ]-\n" + "\n" + "\nRUBRIC IN THE BLOCK" + "\n===================\n" + "\nBLOCK\n" + "\n -[ RUBRIC TITLE ]-\n") + assert result == expect + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_docfields(app): + app.build() + # --- docfields + result = (app.outdir / 'docfields.txt').read_text(encoding='utf8') + expect = ("21. 
I18N WITH DOCFIELDS" + "\n***********************\n" + "\nclass Cls1\n" + "\n Parameters:" + "\n **param** -- DESCRIPTION OF PARAMETER param\n" + "\nclass Cls2\n" + "\n Parameters:" + "\n * **foo** -- DESCRIPTION OF PARAMETER foo\n" + "\n * **bar** -- DESCRIPTION OF PARAMETER bar\n" + "\nclass Cls3(values)\n" + "\n Raises:" + "\n **ValueError** -- IF THE VALUES ARE OUT OF RANGE\n" + "\nclass Cls4(values)\n" + "\n Raises:" + "\n * **TypeError** -- IF THE VALUES ARE NOT VALID\n" + "\n * **ValueError** -- IF THE VALUES ARE OUT OF RANGE\n" + "\nclass Cls5\n" + "\n Returns:" + '\n A NEW "Cls3" INSTANCE\n') + assert result == expect + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_admonitions(app): + app.build() + # --- admonitions + # #1206: gettext did not translate admonition directive's title + # seealso: https://docutils.sourceforge.io/docs/ref/rst/directives.html#admonitions + result = (app.outdir / 'admonitions.txt').read_text(encoding='utf8') + directives = ( + "attention", "caution", "danger", "error", "hint", + "important", "note", "tip", "warning", "admonition") + for d in directives: + assert d.upper() + " TITLE" in result + assert d.upper() + " BODY" in result + + # for #4938 `1. ` prefixed admonition title + assert "1. ADMONITION TITLE" in result + + +@sphinx_intl +@pytest.mark.sphinx('gettext') +@pytest.mark.test_params(shared_result='test_intl_gettext') +def test_gettext_toctree(app): + app.build() + # --- toctree (index.rst) + expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'index.po') + actual = read_po(app.outdir / 'index.pot') + for expect_msg in [m for m in expect if m.id]: + assert expect_msg.id in [m.id for m in actual if m.id] + # --- toctree (toctree.rst) + expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'toctree.po') + actual = read_po(app.outdir / 'toctree.pot') + for expect_msg in [m for m in expect if m.id]: + assert expect_msg.id in [m.id for m in actual if m.id] + + +@sphinx_intl +@pytest.mark.sphinx('gettext') +@pytest.mark.test_params(shared_result='test_intl_gettext') +def test_gettext_table(app): + app.build() + # --- toctree + expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'table.po') + actual = read_po(app.outdir / 'table.pot') + for expect_msg in [m for m in expect if m.id]: + assert expect_msg.id in [m.id for m in actual if m.id] + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_table(app): + app.build() + # --- toctree + result = (app.outdir / 'table.txt').read_text(encoding='utf8') + expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'table.po') + for expect_msg in [m for m in expect if m.id]: + assert expect_msg.string in result + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_toctree(app): + app.build() + # --- toctree (index.rst) + # Note: index.rst contains contents that is not shown in text. 
+ result = (app.outdir / 'index.txt').read_text(encoding='utf8') + assert 'CONTENTS' in result + assert 'TABLE OF CONTENTS' in result + # --- toctree (toctree.rst) + result = (app.outdir / 'toctree.txt').read_text(encoding='utf8') + expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'toctree.po') + for expect_msg in (m for m in expect if m.id): + assert expect_msg.string in result + + +@sphinx_intl +@pytest.mark.sphinx('gettext') +@pytest.mark.test_params(shared_result='test_intl_gettext') +def test_gettext_topic(app): + app.build() + # --- topic + expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'topic.po') + actual = read_po(app.outdir / 'topic.pot') + for expect_msg in [m for m in expect if m.id]: + assert expect_msg.id in [m.id for m in actual if m.id] + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_topic(app): + app.build() + # --- topic + result = (app.outdir / 'topic.txt').read_text(encoding='utf8') + expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'topic.po') + for expect_msg in [m for m in expect if m.id]: + assert expect_msg.string in result + + +@sphinx_intl +@pytest.mark.sphinx('gettext') +@pytest.mark.test_params(shared_result='test_intl_gettext') +def test_gettext_definition_terms(app): + app.build() + # --- definition terms: regression test for #2198, #2205 + expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'definition_terms.po') + actual = read_po(app.outdir / 'definition_terms.pot') + for expect_msg in [m for m in expect if m.id]: + assert expect_msg.id in [m.id for m in actual if m.id] + + +@sphinx_intl +@pytest.mark.sphinx('gettext') +@pytest.mark.test_params(shared_result='test_intl_gettext') +def test_gettext_glossary_terms(app, warning): + app.build() + # --- glossary terms: regression test for #1090 + expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'glossary_terms.po') + actual = read_po(app.outdir / 'glossary_terms.pot') + for expect_msg in [m for m in expect if m.id]: + assert expect_msg.id in [m.id for m in actual if m.id] + warnings = warning.getvalue().replace(os.sep, '/') + assert 'term not in glossary' not in warnings + + +@sphinx_intl +@pytest.mark.sphinx('gettext') +@pytest.mark.test_params(shared_result='test_intl_gettext') +def test_gettext_glossary_term_inconsistencies(app): + app.build() + # --- glossary term inconsistencies: regression test for #1090 + expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'glossary_terms_inconsistency.po') + actual = read_po(app.outdir / 'glossary_terms_inconsistency.pot') + for expect_msg in [m for m in expect if m.id]: + assert expect_msg.id in [m.id for m in actual if m.id] + + +@sphinx_intl +@pytest.mark.sphinx('gettext') +@pytest.mark.test_params(shared_result='test_intl_gettext') +def test_gettext_literalblock(app): + app.build() + # --- gettext builder always ignores ``only`` directive + expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'literalblock.po') + actual = read_po(app.outdir / 'literalblock.pot') + for expect_msg in [m for m in expect if m.id]: + if len(expect_msg.id.splitlines()) == 1: + # compare translations only labels + assert expect_msg.id in [m.id for m in actual if m.id] + else: + pass # skip code-blocks and literalblocks + + +@sphinx_intl +@pytest.mark.sphinx('gettext') +@pytest.mark.test_params(shared_result='test_intl_gettext') +def test_gettext_buildr_ignores_only_directive(app): + app.build() + # --- gettext builder always ignores ``only`` directive + expect = read_po(app.srcdir / 'xx' / 
'LC_MESSAGES' / 'only.po') + actual = read_po(app.outdir / 'only.pot') + for expect_msg in [m for m in expect if m.id]: + assert expect_msg.id in [m.id for m in actual if m.id] + + +@sphinx_intl +def test_node_translated_attribute(app): + app.builder.build_specific([app.srcdir / 'translation_progress.txt']) + + doctree = app.env.get_doctree('translation_progress') + + translated_nodes = sum(1 for _ in doctree.findall(NodeMatcher(translated=True))) + assert translated_nodes == 10 + 1 # 10 lines + title + + untranslated_nodes = sum(1 for _ in doctree.findall(NodeMatcher(translated=False))) + assert untranslated_nodes == 2 + 2 + 1 # 2 lines + 2 lines + substitution reference + + +@sphinx_intl +def test_translation_progress_substitution(app): + app.builder.build_specific([app.srcdir / 'translation_progress.txt']) + + doctree = app.env.get_doctree('translation_progress') + + assert doctree[0][19][0] == '68.75%' # 11 out of 16 lines are translated + + +@pytest.mark.sphinx(testroot='intl', freshenv=True, confoverrides={ + 'language': 'xx', 'locale_dirs': ['.'], + 'gettext_compact': False, + 'translation_progress_classes': True, +}) +def test_translation_progress_classes_true(app): + app.builder.build_specific([app.srcdir / 'translation_progress.txt']) + + doctree = app.env.get_doctree('translation_progress') + + # title + assert 'translated' in doctree[0][0]['classes'] + + # translated lines + assert 'translated' in doctree[0][1]['classes'] + assert 'translated' in doctree[0][2]['classes'] + assert 'translated' in doctree[0][3]['classes'] + assert 'translated' in doctree[0][4]['classes'] + assert 'translated' in doctree[0][5]['classes'] + assert 'translated' in doctree[0][6]['classes'] + assert 'translated' in doctree[0][7]['classes'] + assert 'translated' in doctree[0][8]['classes'] + + assert doctree[0][9]['classes'] == [] # comment node + + # idempotent + assert 'translated' in doctree[0][10]['classes'] + assert 'translated' in doctree[0][11]['classes'] + + assert doctree[0][12]['classes'] == [] # comment node + + # untranslated + assert 'untranslated' in doctree[0][13]['classes'] + assert 'untranslated' in doctree[0][14]['classes'] + + assert doctree[0][15]['classes'] == [] # comment node + + # missing + assert 'untranslated' in doctree[0][16]['classes'] + assert 'untranslated' in doctree[0][17]['classes'] + + assert doctree[0][18]['classes'] == [] # comment node + + # substitution reference + assert 'untranslated' in doctree[0][19]['classes'] + + assert len(doctree[0]) == 20 + + +@sphinx_intl +# use individual shared_result directory to avoid "incompatible doctree" error +@pytest.mark.sphinx(testroot='builder-gettext-dont-rebuild-mo') +def test_gettext_dont_rebuild_mo(make_app, app_params): + # --- don't rebuild by .mo mtime + def get_update_targets(app_): + app_.env.find_files(app_.config, app_.builder) + added, changed, removed = app_.env.get_outdated_files(config_changed=False) + return added, changed, removed + + args, kwargs = app_params + + # phase1: build document with non-gettext builder and generate mo file in srcdir + app0 = make_app('dummy', *args, **kwargs) + app0.build() + time.sleep(0.01) + assert (app0.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').exists() + # Since it is after the build, the number of documents to be updated is 0 + update_targets = get_update_targets(app0) + assert update_targets[1] == set(), update_targets + # When rewriting the timestamp of mo file, the number of documents to be + # updated will be changed. 
+ mtime = (app0.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').stat().st_mtime + os.utime(app0.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo', (mtime + 5, mtime + 5)) + update_targets = get_update_targets(app0) + assert update_targets[1] == {'bom'}, update_targets + + # Because doctree for gettext builder can not be shared with other builders, + # erase doctreedir before gettext build. + shutil.rmtree(app0.doctreedir) + + # phase2: build document with gettext builder. + # The mo file in the srcdir directory is retained. + app = make_app('gettext', *args, **kwargs) + app.build() + time.sleep(0.01) + # Since it is after the build, the number of documents to be updated is 0 + update_targets = get_update_targets(app) + assert update_targets[1] == set(), update_targets + # Even if the timestamp of the mo file is updated, the number of documents + # to be updated is 0. gettext builder does not rebuild because of mo update. + os.utime(app0.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo', (mtime + 10, mtime + 10)) + update_targets = get_update_targets(app) + assert update_targets[1] == set(), update_targets + + +@sphinx_intl +@pytest.mark.sphinx('html') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_html_meta(app): + app.build() + # --- test for meta + result = (app.outdir / 'index.html').read_text(encoding='utf8') + expected_expr = '<meta content="TESTDATA FOR I18N" name="description" translated="True" />' + assert expected_expr in result + expected_expr = '<meta content="I18N, SPHINX, MARKUP" name="keywords" translated="True" />' + assert expected_expr in result + expected_expr = '<p class="caption" role="heading"><span class="caption-text">HIDDEN TOC</span></p>' + assert expected_expr in result + + +@sphinx_intl +@pytest.mark.sphinx('html') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_html_footnotes(app): + app.build() + # --- test for #955 cant-build-html-with-footnotes-when-using + # expect no error by build + (app.outdir / 'footnote.html').read_text(encoding='utf8') + + +@sphinx_intl +@pytest.mark.sphinx('html') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_html_undefined_refs(app): + app.build() + # --- links to undefined reference + result = (app.outdir / 'refs_inconsistency.html').read_text(encoding='utf8') + + expected_expr = ('<a class="reference external" ' + 'href="http://www.example.com">reference</a>') + assert len(re.findall(expected_expr, result)) == 2 + + expected_expr = ('<a class="reference internal" ' + 'href="#reference">reference</a>') + assert len(re.findall(expected_expr, result)) == 0 + + expected_expr = ('<a class="reference internal" ' + 'href="#i18n-with-refs-inconsistency">I18N WITH ' + 'REFS INCONSISTENCY</a>') + assert len(re.findall(expected_expr, result)) == 1 + + +@sphinx_intl +@pytest.mark.sphinx('html') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_html_index_entries(app): + app.build() + # --- index entries: regression test for #976 + result = (app.outdir / 'genindex.html').read_text(encoding='utf8') + + def wrap(tag, keyword): + start_tag = "<%s[^>]*>" % tag + end_tag = "</%s>" % tag + return fr"{start_tag}\s*{keyword}\s*{end_tag}" + + def wrap_nest(parenttag, childtag, keyword): + start_tag1 = "<%s[^>]*>" % parenttag + start_tag2 = "<%s[^>]*>" % childtag + return fr"{start_tag1}\s*{keyword}\s*{start_tag2}" + expected_exprs = [ + wrap('a', 'NEWSLETTER'), + wrap('a', 'MAILING LIST'), + wrap('a', 'RECIPIENTS LIST'), + wrap('a', 'FIRST SECOND'), + wrap('a', 'SECOND THIRD'), + wrap('a', 
'THIRD, FIRST'), + wrap_nest('li', 'ul', 'ENTRY'), + wrap_nest('li', 'ul', 'SEE'), + ] + for expr in expected_exprs: + assert re.search(expr, result, re.MULTILINE), f'{expr!r} did not match {result!r}' + + +@sphinx_intl +@pytest.mark.sphinx('html') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_html_versionchanges(app): + app.build() + # --- versionchanges + result = (app.outdir / 'versionchange.html').read_text(encoding='utf8') + + def get_content(result, name): + matched = re.search(r'<div class="%s">\n*(.*?)</div>' % name, + result, re.DOTALL) + if matched: + return matched.group(1) + else: + return '' + + expect1 = ( + """<p><span class="versionmodified deprecated">Deprecated since version 1.0: </span>""" + """THIS IS THE <em>FIRST</em> PARAGRAPH OF DEPRECATED.</p>\n""" + """<p>THIS IS THE <em>SECOND</em> PARAGRAPH OF DEPRECATED.</p>\n""") + matched_content = get_content(result, "deprecated") + assert expect1 == matched_content + + expect2 = ( + """<p><span class="versionmodified added">New in version 1.0: </span>""" + """THIS IS THE <em>FIRST</em> PARAGRAPH OF VERSIONADDED.</p>\n""") + matched_content = get_content(result, "versionadded") + assert expect2 == matched_content + + expect3 = ( + """<p><span class="versionmodified changed">Changed in version 1.0: </span>""" + """THIS IS THE <em>FIRST</em> PARAGRAPH OF VERSIONCHANGED.</p>\n""") + matched_content = get_content(result, "versionchanged") + assert expect3 == matched_content + + +@sphinx_intl +@pytest.mark.sphinx('html') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_html_docfields(app): + app.build() + # --- docfields + # expect no error by build + (app.outdir / 'docfields.html').read_text(encoding='utf8') + + +@sphinx_intl +@pytest.mark.sphinx('html') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_html_template(app): + app.build() + # --- gettext template + result = (app.outdir / 'contents.html').read_text(encoding='utf8') + assert "WELCOME" in result + assert "SPHINX 2013.120" in result + + +@sphinx_intl +@pytest.mark.sphinx('html') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_html_rebuild_mo(app): + app.build() + # --- rebuild by .mo mtime + app.builder.build_update() + app.env.find_files(app.config, app.builder) + _, updated, _ = app.env.get_outdated_files(config_changed=False) + assert len(updated) == 0 + + mtime = (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').stat().st_mtime + os.utime(app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo', (mtime + 5, mtime + 5)) + app.env.find_files(app.config, app.builder) + _, updated, _ = app.env.get_outdated_files(config_changed=False) + assert len(updated) == 1 + + +@sphinx_intl +@pytest.mark.sphinx('xml') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_xml_footnotes(app, warning): + app.build() + # --- footnotes: regression test for fix #955, #1176 + et = etree_parse(app.outdir / 'footnote.xml') + secs = et.findall('section') + + para0 = secs[0].findall('paragraph') + assert_elem( + para0[0], + ['I18N WITH FOOTNOTE', 'INCLUDE THIS CONTENTS', + '2', '[ref]', '1', '100', '*', '. 
SECOND FOOTNOTE_REF', '100', '.'], + ['i18n-with-footnote', 'ref']) + + # check node_id for footnote_references which refer same footnote (refs: #3002) + assert para0[0][4].text == para0[0][6].text == '100' + assert para0[0][4].attrib['ids'] != para0[0][6].attrib['ids'] + + footnote0 = secs[0].findall('footnote') + assert_elem( + footnote0[0], + ['1', 'THIS IS A AUTO NUMBERED FOOTNOTE.'], + None, + ['1']) + assert_elem( + footnote0[1], + ['100', 'THIS IS A NUMBERED FOOTNOTE.'], + None, + ['100']) + assert_elem( + footnote0[2], + ['2', 'THIS IS A AUTO NUMBERED NAMED FOOTNOTE.'], + None, + ['named']) + assert_elem( + footnote0[3], + ['*', 'THIS IS A AUTO SYMBOL FOOTNOTE.'], + None, + None) + + citation0 = secs[0].findall('citation') + assert_elem( + citation0[0], + ['ref', 'THIS IS A NAMED FOOTNOTE.'], + None, + ['ref']) + + warnings = getwarning(warning) + warning_expr = '.*/footnote.xml:\\d*: SEVERE: Duplicate ID: ".*".\n' + assert not re.search(warning_expr, warnings), f'{warning_expr!r} did match {warnings!r}' + + +@sphinx_intl +@pytest.mark.sphinx('xml') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_xml_footnote_backlinks(app): + app.build() + # --- footnote backlinks: i18n test for #1058 + et = etree_parse(app.outdir / 'footnote.xml') + secs = et.findall('section') + + para0 = secs[0].findall('paragraph') + refs0 = para0[0].findall('footnote_reference') + refid2id = {r.attrib.get('refid'): r.attrib.get('ids') for r in refs0} + + footnote0 = secs[0].findall('footnote') + for footnote in footnote0: + ids = footnote.attrib.get('ids') + backrefs = footnote.attrib.get('backrefs').split() + assert refid2id[ids] in backrefs + + +@sphinx_intl +@pytest.mark.sphinx('xml') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_xml_refs_in_python_domain(app): + app.build() + # --- refs in the Python domain + et = etree_parse(app.outdir / 'refs_python_domain.xml') + secs = et.findall('section') + + # regression test for fix #1363 + para0 = secs[0].findall('paragraph') + assert_elem( + para0[0], + ['SEE THIS DECORATOR:', 'sensitive_variables()', '.'], + ['sensitive.sensitive_variables']) + + +@sphinx_intl +@pytest.mark.sphinx('xml') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_xml_keep_external_links(app): + app.build() + # --- keep external links: regression test for #1044 + et = etree_parse(app.outdir / 'external_links.xml') + secs = et.findall('section') + + para0 = secs[0].findall('paragraph') + # external link check + assert_elem( + para0[0], + ['EXTERNAL LINK TO', 'Python', '.'], + ['http://python.org/index.html']) + + # internal link check + assert_elem( + para0[1], + ['EXTERNAL LINKS', 'IS INTERNAL LINK.'], + ['i18n-with-external-links']) + + # inline link check + assert_elem( + para0[2], + ['INLINE LINK BY', 'THE SPHINX SITE', '.'], + ['http://sphinx-doc.org']) + + # unnamed link check + assert_elem( + para0[3], + ['UNNAMED', 'LINK', '.'], + ['http://google.com']) + + # link target swapped translation + para1 = secs[1].findall('paragraph') + assert_elem( + para1[0], + ['LINK TO', 'external2', 'AND', 'external1', '.'], + ['https://www.google.com/external2', + 'https://www.google.com/external1']) + assert_elem( + para1[1], + ['LINK TO', 'THE PYTHON SITE', 'AND', 'THE SPHINX SITE', '.'], + ['http://python.org', 'http://sphinx-doc.org']) + + # multiple references in the same line + para2 = secs[2].findall('paragraph') + assert_elem( + para2[0], + ['LINK TO', 'EXTERNAL LINKS', ',', 'Python', ',', + 'THE SPHINX SITE', ',', 'UNNAMED', 
'AND', + 'THE PYTHON SITE', '.'], + ['i18n-with-external-links', 'http://python.org/index.html', + 'http://sphinx-doc.org', 'http://google.com', + 'http://python.org']) + + +@sphinx_intl +@pytest.mark.sphinx('xml') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_xml_role_xref(app): + app.build() + # --- role xref: regression test for #1090, #1193 + et = etree_parse(app.outdir / 'role_xref.xml') + sec1, sec2 = et.findall('section') + + para1, = sec1.findall('paragraph') + assert_elem( + para1, + ['LINK TO', "I18N ROCK'N ROLE XREF", ',', 'CONTENTS', ',', + 'SOME NEW TERM', '.'], + ['i18n-role-xref', 'index', + 'glossary_terms#term-Some-term']) + + para2 = sec2.findall('paragraph') + assert_elem( + para2[0], + ['LINK TO', 'SOME OTHER NEW TERM', 'AND', 'SOME NEW TERM', '.'], + ['glossary_terms#term-Some-other-term', + 'glossary_terms#term-Some-term']) + assert_elem( + para2[1], + ['LINK TO', 'LABEL', 'AND', + 'SAME TYPE LINKS', 'AND', 'SAME TYPE LINKS', '.'], + ['i18n-role-xref', 'same-type-links', 'same-type-links']) + assert_elem( + para2[2], + ['LINK TO', 'I18N WITH GLOSSARY TERMS', 'AND', 'CONTENTS', '.'], + ['glossary_terms', 'index']) + assert_elem( + para2[3], + ['LINK TO', '--module', 'AND', '-m', '.'], + ['cmdoption-module', 'cmdoption-m']) + assert_elem( + para2[4], + ['LINK TO', 'env2', 'AND', 'env1', '.'], + ['envvar-env2', 'envvar-env1']) + assert_elem( + para2[5], + ['LINK TO', 'token2', 'AND', 'token1', '.'], + []) # TODO: how do I link token role to productionlist? + assert_elem( + para2[6], + ['LINK TO', 'same-type-links', 'AND', "i18n-role-xref", '.'], + ['same-type-links', 'i18n-role-xref']) + + +@sphinx_intl +@pytest.mark.sphinx('xml') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_xml_warnings(app, warning): + app.build() + # warnings + warnings = getwarning(warning) + assert 'term not in glossary' not in warnings + assert 'undefined label' not in warnings + assert 'unknown document' not in warnings + + +@sphinx_intl +@pytest.mark.sphinx('xml') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_xml_label_targets(app): + app.build() + # --- label targets: regression test for #1193, #1265 + et = etree_parse(app.outdir / 'label_target.xml') + secs = et.findall('section') + + para0 = secs[0].findall('paragraph') + assert_elem( + para0[0], + ['X SECTION AND LABEL', 'POINT TO', 'implicit-target', 'AND', + 'X SECTION AND LABEL', 'POINT TO', 'section-and-label', '.'], + ['implicit-target', 'section-and-label']) + + para1 = secs[1].findall('paragraph') + assert_elem( + para1[0], + ['X EXPLICIT-TARGET', 'POINT TO', 'explicit-target', 'AND', + 'X EXPLICIT-TARGET', 'POINT TO DUPLICATED ID LIKE', 'id1', + '.'], + ['explicit-target', 'id1']) + + para2 = secs[2].findall('paragraph') + assert_elem( + para2[0], + ['X IMPLICIT SECTION NAME', 'POINT TO', + 'implicit-section-name', '.'], + ['implicit-section-name']) + + sec2 = secs[2].findall('section') + + para2_0 = sec2[0].findall('paragraph') + assert_elem( + para2_0[0], + ['`X DUPLICATED SUB SECTION`_', 'IS BROKEN LINK.'], + []) + + para3 = secs[3].findall('paragraph') + assert_elem( + para3[0], + ['X', 'bridge label', + 'IS NOT TRANSLATABLE BUT LINKED TO TRANSLATED ' + + 'SECTION TITLE.'], + ['label-bridged-target-section']) + assert_elem( + para3[1], + ['X', 'bridge label', 'POINT TO', + 'LABEL BRIDGED TARGET SECTION', 'AND', 'bridge label2', + 'POINT TO', 'SECTION AND LABEL', '. 
THE SECOND APPEARED', + 'bridge label2', 'POINT TO CORRECT TARGET.'], + ['label-bridged-target-section', + 'section-and-label', + 'section-and-label']) + + +@sphinx_intl +@pytest.mark.sphinx('html') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_additional_targets_should_not_be_translated(app): + app.build() + # [literalblock.txt] + result = (app.outdir / 'literalblock.html').read_text(encoding='utf8') + + # title should be translated + expected_expr = 'CODE-BLOCKS' + assert_count(expected_expr, result, 2) + + # ruby code block should not be translated but be highlighted + expected_expr = """<span class="s1">'result'</span>""" + assert_count(expected_expr, result, 1) + + # C code block without lang should not be translated and *ruby* highlighted + expected_expr = """<span class="c1">#include <stdlib.h></span>""" + assert_count(expected_expr, result, 1) + + # C code block with lang should not be translated but be *C* highlighted + expected_expr = ("""<span class="cp">#include</span>""" + """<span class="w"> </span>""" + """<span class="cpf"><stdio.h></span>""") + assert_count(expected_expr, result, 1) + + # literal block in list item should not be translated + expected_expr = ("""<span class="n">literal</span>""" + """<span class="o">-</span>""" + """<span class="n">block</span>\n""" + """<span class="k">in</span>""" + """<span class="w"> </span>""" + """<span class="n">list</span>""") + assert_count(expected_expr, result, 1) + + # doctest block should not be translated but be highlighted + expected_expr = ( + """<span class="gp">>>> </span>""" + """<span class="kn">import</span> <span class="nn">sys</span> """ + """<span class="c1"># sys importing</span>""") + assert_count(expected_expr, result, 1) + + # [raw.txt] + + result = (app.outdir / 'raw.html').read_text(encoding='utf8') + + # raw block should not be translated + expected_expr = """<iframe src="http://sphinx-doc.org"></iframe></section>""" + assert_count(expected_expr, result, 1) + + # [figure.txt] + + result = (app.outdir / 'figure.html').read_text(encoding='utf8') + + # src for image block should not be translated (alt is translated) + expected_expr = """<img alt="I18N -> IMG" src="_images/i18n.png" />""" + assert_count(expected_expr, result, 1) + + # src for figure block should not be translated (alt is translated) + expected_expr = """<img alt="IMG -> I18N" src="_images/img.png" />""" + assert_count(expected_expr, result, 1) + + +@sphinx_intl +@pytest.mark.sphinx( + 'html', + srcdir='test_additional_targets_should_be_translated', + confoverrides={ + 'language': 'xx', 'locale_dirs': ['.'], + 'gettext_compact': False, + 'gettext_additional_targets': [ + 'index', + 'literal-block', + 'doctest-block', + 'raw', + 'image', + ], + }, +) +def test_additional_targets_should_be_translated(app): + app.build() + # [literalblock.txt] + result = (app.outdir / 'literalblock.html').read_text(encoding='utf8') + + # title should be translated + expected_expr = 'CODE-BLOCKS' + assert_count(expected_expr, result, 2) + + # ruby code block should be translated and be highlighted + expected_expr = """<span class="s1">'RESULT'</span>""" + assert_count(expected_expr, result, 1) + + # C code block without lang should be translated and *ruby* highlighted + expected_expr = """<span class="c1">#include <STDLIB.H></span>""" + assert_count(expected_expr, result, 1) + + # C code block with lang should be translated and be *C* highlighted + expected_expr = ("""<span class="cp">#include</span>""" + """<span class="w"> </span>""" + """<span 
class="cpf"><STDIO.H></span>""") + assert_count(expected_expr, result, 1) + + # literal block in list item should be translated + expected_expr = ("""<span class="no">LITERAL</span>""" + """<span class="o">-</span>""" + """<span class="no">BLOCK</span>\n""" + """<span class="no">IN</span>""" + """<span class="w"> </span>""" + """<span class="no">LIST</span>""") + assert_count(expected_expr, result, 1) + + # doctest block should not be translated but be highlighted + expected_expr = ( + """<span class="gp">>>> </span>""" + """<span class="kn">import</span> <span class="nn">sys</span> """ + """<span class="c1"># SYS IMPORTING</span>""") + assert_count(expected_expr, result, 1) + + # '#noqa' should remain in literal blocks. + assert_count("#noqa", result, 1) + + # [raw.txt] + + result = (app.outdir / 'raw.html').read_text(encoding='utf8') + + # raw block should be translated + expected_expr = """<iframe src="HTTP://SPHINX-DOC.ORG"></iframe></section>""" + assert_count(expected_expr, result, 1) + + # [figure.txt] + + result = (app.outdir / 'figure.html').read_text(encoding='utf8') + + # alt and src for image block should be translated + expected_expr = """<img alt="I18N -> IMG" src="_images/img.png" />""" + assert_count(expected_expr, result, 1) + + # alt and src for figure block should be translated + expected_expr = """<img alt="IMG -> I18N" src="_images/i18n.png" />""" + assert_count(expected_expr, result, 1) + + +@pytest.mark.sphinx( + 'html', + testroot='intl_substitution_definitions', + confoverrides={ + 'language': 'xx', 'locale_dirs': ['.'], + 'gettext_compact': False, + 'gettext_additional_targets': [ + 'index', + 'literal-block', + 'doctest-block', + 'raw', + 'image', + ], + }, +) +def test_additional_targets_should_be_translated_substitution_definitions(app): + app.builder.build_all() + + # [prolog_epilog_substitution.txt] + + result = (app.outdir / 'prolog_epilog_substitution.html').read_text(encoding='utf8') + + # alt and src for image block should be translated + expected_expr = """<img alt="SUBST_PROLOG_2 TRANSLATED" src="_images/i18n.png" />""" + assert_count(expected_expr, result, 1) + + # alt and src for image block should be translated + expected_expr = """<img alt="SUBST_EPILOG_2 TRANSLATED" src="_images/img.png" />""" + assert_count(expected_expr, result, 1) + + +@sphinx_intl +@pytest.mark.sphinx('text') +@pytest.mark.test_params(shared_result='test_intl_basic') +def test_text_references(app, warning): + app.builder.build_specific([app.srcdir / 'refs.txt']) + + warnings = warning.getvalue().replace(os.sep, '/') + warning_expr = 'refs.txt:\\d+: ERROR: Unknown target name:' + assert_count(warning_expr, warnings, 0) + + +@pytest.mark.sphinx( + 'text', + testroot='intl_substitution_definitions', + confoverrides={ + 'language': 'xx', 'locale_dirs': ['.'], + 'gettext_compact': False, + }, +) +def test_text_prolog_epilog_substitution(app): + app.build() + + result = (app.outdir / 'prolog_epilog_substitution.txt').read_text(encoding='utf8') + + assert result == """\ +1. I18N WITH PROLOGUE AND EPILOGUE SUBSTITUTIONS +************************************************ + +THIS IS CONTENT THAT CONTAINS prologue substitute text. + +SUBSTITUTED IMAGE [image: SUBST_PROLOG_2 TRANSLATED][image] HERE. + +THIS IS CONTENT THAT CONTAINS epilogue substitute text. + +SUBSTITUTED IMAGE [image: SUBST_EPILOG_2 TRANSLATED][image] HERE. 
+""" + + +@pytest.mark.sphinx( + 'dummy', testroot='images', + srcdir='test_intl_images', + confoverrides={'language': 'xx'}, +) +def test_image_glob_intl(app): + app.build() + + # index.rst + doctree = app.env.get_doctree('index') + assert_node(doctree[0][1], nodes.image, uri='rimg.xx.png', + candidates={'*': 'rimg.xx.png'}) + + assert isinstance(doctree[0][2], nodes.figure) + assert_node(doctree[0][2][0], nodes.image, uri='rimg.xx.png', + candidates={'*': 'rimg.xx.png'}) + + assert_node(doctree[0][3], nodes.image, uri='img.*', + candidates={'application/pdf': 'img.pdf', + 'image/gif': 'img.gif', + 'image/png': 'img.png'}) + + assert isinstance(doctree[0][4], nodes.figure) + assert_node(doctree[0][4][0], nodes.image, uri='img.*', + candidates={'application/pdf': 'img.pdf', + 'image/gif': 'img.gif', + 'image/png': 'img.png'}) + + # subdir/index.rst + doctree = app.env.get_doctree('subdir/index') + assert_node(doctree[0][1], nodes.image, uri='subdir/rimg.xx.png', + candidates={'*': 'subdir/rimg.xx.png'}) + + assert_node(doctree[0][2], nodes.image, uri='subdir/svgimg.*', + candidates={'application/pdf': 'subdir/svgimg.pdf', + 'image/svg+xml': 'subdir/svgimg.xx.svg'}) + + assert isinstance(doctree[0][3], nodes.figure) + assert_node(doctree[0][3][0], nodes.image, uri='subdir/svgimg.*', + candidates={'application/pdf': 'subdir/svgimg.pdf', + 'image/svg+xml': 'subdir/svgimg.xx.svg'}) + + +@pytest.mark.sphinx( + 'dummy', testroot='images', + srcdir='test_intl_images', + confoverrides={ + 'language': 'xx', + 'figure_language_filename': '{root}{ext}.{language}', + }, +) +def test_image_glob_intl_using_figure_language_filename(app): + app.build() + + # index.rst + doctree = app.env.get_doctree('index') + assert_node(doctree[0][1], nodes.image, uri='rimg.png.xx', + candidates={'*': 'rimg.png.xx'}) + + assert isinstance(doctree[0][2], nodes.figure) + assert_node(doctree[0][2][0], nodes.image, uri='rimg.png.xx', + candidates={'*': 'rimg.png.xx'}) + + assert_node(doctree[0][3], nodes.image, uri='img.*', + candidates={'application/pdf': 'img.pdf', + 'image/gif': 'img.gif', + 'image/png': 'img.png'}) + + assert isinstance(doctree[0][4], nodes.figure) + assert_node(doctree[0][4][0], nodes.image, uri='img.*', + candidates={'application/pdf': 'img.pdf', + 'image/gif': 'img.gif', + 'image/png': 'img.png'}) + + # subdir/index.rst + doctree = app.env.get_doctree('subdir/index') + assert_node(doctree[0][1], nodes.image, uri='subdir/rimg.png', + candidates={'*': 'subdir/rimg.png'}) + + assert_node(doctree[0][2], nodes.image, uri='subdir/svgimg.*', + candidates={'application/pdf': 'subdir/svgimg.pdf', + 'image/svg+xml': 'subdir/svgimg.svg'}) + + assert isinstance(doctree[0][3], nodes.figure) + assert_node(doctree[0][3][0], nodes.image, uri='subdir/svgimg.*', + candidates={'application/pdf': 'subdir/svgimg.pdf', + 'image/svg+xml': 'subdir/svgimg.svg'}) + + +def getwarning(warnings): + return strip_escseq(warnings.getvalue().replace(os.sep, '/')) + + +@pytest.mark.sphinx('html', testroot='basic', + srcdir='gettext_allow_fuzzy_translations', + confoverrides={ + 'language': 'de', + 'gettext_allow_fuzzy_translations': True, + }) +def test_gettext_allow_fuzzy_translations(app): + locale_dir = app.srcdir / 'locales' / 'de' / 'LC_MESSAGES' + locale_dir.mkdir(parents=True, exist_ok=True) + with (locale_dir / 'index.po').open('wb') as f: + catalog = Catalog() + catalog.add('features', 'FEATURES', flags=('fuzzy',)) + pofile.write_po(f, catalog) + + app.build() + content = (app.outdir / 
'index.html').read_text(encoding='utf8') + assert 'FEATURES' in content + + +@pytest.mark.sphinx('html', testroot='basic', + srcdir='gettext_disallow_fuzzy_translations', + confoverrides={ + 'language': 'de', + 'gettext_allow_fuzzy_translations': False, + }) +def test_gettext_disallow_fuzzy_translations(app): + locale_dir = app.srcdir / 'locales' / 'de' / 'LC_MESSAGES' + locale_dir.mkdir(parents=True, exist_ok=True) + with (locale_dir / 'index.po').open('wb') as f: + catalog = Catalog() + catalog.add('features', 'FEATURES', flags=('fuzzy',)) + pofile.write_po(f, catalog) + + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert 'FEATURES' not in content + + +@pytest.mark.sphinx('html', testroot='basic', confoverrides={'language': 'de'}) +def test_customize_system_message(make_app, app_params, sphinx_test_tempdir): + try: + # clear translators cache + locale.translators.clear() + + # prepare message catalog (.po) + locale_dir = sphinx_test_tempdir / 'basic' / 'locales' / 'de' / 'LC_MESSAGES' + locale_dir.mkdir(parents=True, exist_ok=True) + with (locale_dir / 'sphinx.po').open('wb') as f: + catalog = Catalog() + catalog.add('Quick search', 'QUICK SEARCH') + pofile.write_po(f, catalog) + + # construct application and convert po file to .mo + args, kwargs = app_params + app = make_app(*args, **kwargs) + assert (locale_dir / 'sphinx.mo').exists() + assert app.translator.gettext('Quick search') == 'QUICK SEARCH' + + app.build() + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert 'QUICK SEARCH' in content + finally: + locale.translators.clear() + + +@pytest.mark.sphinx('html', testroot='intl', confoverrides={'today_fmt': '%Y-%m-%d'}) +def test_customize_today_date_format(app, monkeypatch): + with monkeypatch.context() as m: + m.setenv('SOURCE_DATE_EPOCH', '1439131307') + app.build() + content = (app.outdir / 'refs.html').read_text(encoding='utf8') + + assert '2015-08-09' in content diff --git a/tests/test_locale.py b/tests/test_locale.py new file mode 100644 index 0000000..11dd95d --- /dev/null +++ b/tests/test_locale.py @@ -0,0 +1,76 @@ +"""Test locale.""" + +import pytest + +from sphinx import locale + + +@pytest.fixture(autouse=True) +def _cleanup_translations(): + yield + locale.translators.clear() + + +def test_init(rootdir): + # not initialized yet + _ = locale.get_translation('myext') + assert _('Hello world') == 'Hello world' + assert _('Hello sphinx') == 'Hello sphinx' + assert _('Hello reST') == 'Hello reST' + + # load locale1 + locale.init([rootdir / 'test-locale' / 'locale1'], 'en', 'myext') + _ = locale.get_translation('myext') + assert _('Hello world') == 'HELLO WORLD' + assert _('Hello sphinx') == 'Hello sphinx' + assert _('Hello reST') == 'Hello reST' + + # load a catalog to unrelated namespace + locale.init([rootdir / 'test-locale' / 'locale2'], 'en', 'myext', 'mynamespace') + _ = locale.get_translation('myext') + assert _('Hello world') == 'HELLO WORLD' + assert _('Hello sphinx') == 'Hello sphinx' # nothing changed here + assert _('Hello reST') == 'Hello reST' + + # load locale2 in addition + locale.init([rootdir / 'test-locale' / 'locale2'], 'en', 'myext') + _ = locale.get_translation('myext') + assert _('Hello world') == 'HELLO WORLD' + assert _('Hello sphinx') == 'HELLO SPHINX' + assert _('Hello reST') == 'Hello reST' + + +def test_init_with_unknown_language(rootdir): + locale.init([rootdir / 'test-locale' / 'locale1'], 'unknown', 'myext') + _ = locale.get_translation('myext') + assert _('Hello world') == 'Hello 
world' + assert _('Hello sphinx') == 'Hello sphinx' + assert _('Hello reST') == 'Hello reST' + + +def test_add_message_catalog(app, rootdir): + app.config.language = 'en' + app.add_message_catalog('myext', rootdir / 'test-locale' / 'locale1') + _ = locale.get_translation('myext') + assert _('Hello world') == 'HELLO WORLD' + assert _('Hello sphinx') == 'Hello sphinx' + assert _('Hello reST') == 'Hello reST' + + +def _empty_language_translation(rootdir): + locale_dirs, catalog = [rootdir / 'test-locale' / 'locale1'], 'myext' + locale.translators.clear() + locale.init(locale_dirs, language=None, catalog=catalog) + return locale.get_translation(catalog) + + +def test_init_environment_language(rootdir, monkeypatch): + with monkeypatch.context() as m: + m.setenv("LANGUAGE", "en_US:en") + _ = _empty_language_translation(rootdir) + assert _('Hello world') == 'HELLO WORLD' + + with monkeypatch.context() as m: + m.setenv("LANGUAGE", "et_EE:et") + _ = _empty_language_translation(rootdir) + assert _('Hello world') == 'Tere maailm' diff --git a/tests/test_markup.py b/tests/test_markup.py new file mode 100644 index 0000000..0d877b3 --- /dev/null +++ b/tests/test_markup.py @@ -0,0 +1,619 @@ +"""Test various Sphinx-specific markup extensions.""" + +import re +import warnings +from types import SimpleNamespace + +import pytest +from docutils import frontend, nodes, utils +from docutils.parsers.rst import Parser as RstParser + +from sphinx import addnodes +from sphinx.builders.html.transforms import KeyboardTransform +from sphinx.builders.latex import LaTeXBuilder +from sphinx.environment import default_settings +from sphinx.roles import XRefRole +from sphinx.testing.util import assert_node +from sphinx.transforms import SphinxSmartQuotes +from sphinx.util import texescape +from sphinx.util.docutils import sphinx_domains +from sphinx.writers.html import HTML5Translator, HTMLWriter +from sphinx.writers.latex import LaTeXTranslator, LaTeXWriter + + +@pytest.fixture() +def settings(app): + texescape.init() # otherwise done by the latex builder + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', category=DeprecationWarning) + # DeprecationWarning: The frontend.OptionParser class will be replaced + # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later. 
+ optparser = frontend.OptionParser( + components=(RstParser, HTMLWriter, LaTeXWriter), + defaults=default_settings) + settings = optparser.get_default_values() + settings.smart_quotes = True + settings.env = app.builder.env + settings.env.temp_data['docname'] = 'dummy' + settings.contentsname = 'dummy' + domain_context = sphinx_domains(settings.env) + domain_context.enable() + yield settings + domain_context.disable() + + +@pytest.fixture() +def new_document(settings): + def create(): + document = utils.new_document('test data', settings) + document['file'] = 'dummy' + return document + + return create + + +@pytest.fixture() +def inliner(new_document): + document = new_document() + document.reporter.get_source_and_line = lambda line=1: ('dummy.rst', line) + return SimpleNamespace(document=document, reporter=document.reporter) + + +@pytest.fixture() +def parse(new_document): + def parse_(rst): + document = new_document() + parser = RstParser() + parser.parse(rst, document) + SphinxSmartQuotes(document, startnode=None).apply() + for msg in list(document.findall(nodes.system_message)): + if msg['level'] == 1: + msg.replace_self([]) + return document + return parse_ + + +# since we're not resolving the markup afterwards, these nodes may remain +class ForgivingTranslator: + def visit_pending_xref(self, node): + pass + + def depart_pending_xref(self, node): + pass + + +class ForgivingHTMLTranslator(HTML5Translator, ForgivingTranslator): + pass + + +class ForgivingLaTeXTranslator(LaTeXTranslator, ForgivingTranslator): + pass + + +@pytest.fixture() +def verify_re_html(app, parse): + def verify(rst, html_expected): + document = parse(rst) + KeyboardTransform(document).apply() + html_translator = ForgivingHTMLTranslator(document, app.builder) + document.walkabout(html_translator) + html_translated = ''.join(html_translator.fragment).strip() + assert re.match(html_expected, html_translated), 'from ' + rst + return verify + + +@pytest.fixture() +def verify_re_latex(app, parse): + def verify(rst, latex_expected): + document = parse(rst) + app.builder = LaTeXBuilder(app, app.env) + app.builder.init() + theme = app.builder.themes.get('manual') + latex_translator = ForgivingLaTeXTranslator(document, app.builder, theme) + latex_translator.first_document = -1 # don't write \begin{document} + document.walkabout(latex_translator) + latex_translated = ''.join(latex_translator.body).strip() + assert re.match(latex_expected, latex_translated), 'from ' + repr(rst) + return verify + + +@pytest.fixture() +def verify_re(verify_re_html, verify_re_latex): + def verify_re_(rst, html_expected, latex_expected): + if html_expected: + verify_re_html(rst, html_expected) + if latex_expected: + verify_re_latex(rst, latex_expected) + return verify_re_ + + +@pytest.fixture() +def verify(verify_re_html, verify_re_latex): + def verify_(rst, html_expected, latex_expected): + if html_expected: + verify_re_html(rst, re.escape(html_expected) + '$') + if latex_expected: + verify_re_latex(rst, re.escape(latex_expected) + '$') + return verify_ + + +@pytest.fixture() +def get_verifier(verify, verify_re): + v = { + 'verify': verify, + 'verify_re': verify_re, + } + + def get(name): + return v[name] + return get + + +@pytest.mark.parametrize(('type', 'rst', 'html_expected', 'latex_expected'), [ + ( + # pep role + 'verify', + ':pep:`8`', + ('<p><span class="target" id="index-0"></span><a class="pep reference external" ' + 'href="https://peps.python.org/pep-0008/"><strong>PEP 8</strong></a></p>'), + ('\\sphinxAtStartPar\n' + '\\index{Python 
Enhancement Proposals@\\spxentry{Python Enhancement Proposals}' + '!PEP 8@\\spxentry{PEP 8}}\\sphinxhref{https://peps.python.org/pep-0008/}' + '{\\sphinxstylestrong{PEP 8}}'), + ), + ( + # pep role with anchor + 'verify', + ':pep:`8#id1`', + ('<p><span class="target" id="index-0"></span><a class="pep reference external" ' + 'href="https://peps.python.org/pep-0008/#id1">' + '<strong>PEP 8#id1</strong></a></p>'), + ('\\sphinxAtStartPar\n' + '\\index{Python Enhancement Proposals@\\spxentry{Python Enhancement Proposals}' + '!PEP 8\\#id1@\\spxentry{PEP 8\\#id1}}\\sphinxhref' + '{https://peps.python.org/pep-0008/\\#id1}' + '{\\sphinxstylestrong{PEP 8\\#id1}}'), + ), + ( + # rfc role + 'verify', + ':rfc:`2324`', + ('<p><span class="target" id="index-0"></span><a class="rfc reference external" ' + 'href="https://datatracker.ietf.org/doc/html/rfc2324.html"><strong>RFC 2324</strong></a></p>'), + ('\\sphinxAtStartPar\n' + '\\index{RFC@\\spxentry{RFC}!RFC 2324@\\spxentry{RFC 2324}}' + '\\sphinxhref{https://datatracker.ietf.org/doc/html/rfc2324.html}' + '{\\sphinxstylestrong{RFC 2324}}'), + ), + ( + # rfc role with anchor + 'verify', + ':rfc:`2324#id1`', + ('<p><span class="target" id="index-0"></span><a class="rfc reference external" ' + 'href="https://datatracker.ietf.org/doc/html/rfc2324.html#id1">' + '<strong>RFC 2324#id1</strong></a></p>'), + ('\\sphinxAtStartPar\n' + '\\index{RFC@\\spxentry{RFC}!RFC 2324\\#id1@\\spxentry{RFC 2324\\#id1}}' + '\\sphinxhref{https://datatracker.ietf.org/doc/html/rfc2324.html\\#id1}' + '{\\sphinxstylestrong{RFC 2324\\#id1}}'), + ), + ( + # correct interpretation of code with whitespace + 'verify_re', + '``code sample``', + ('<p><code class="(samp )?docutils literal notranslate"><span class="pre">' + 'code</span>   <span class="pre">sample</span></code></p>'), + r'\\sphinxAtStartPar\n\\sphinxcode{\\sphinxupquote{code sample}}', + ), + ( + # interpolation of arrows in menuselection + 'verify', + ':menuselection:`a --> b`', + ('<p><span class="menuselection">a \N{TRIANGULAR BULLET} b</span></p>'), + '\\sphinxAtStartPar\n\\sphinxmenuselection{a \\(\\rightarrow\\) b}', + ), + ( + # interpolation of ampersands in menuselection + 'verify', + ':menuselection:`&Foo -&&- &Bar`', + ('<p><span class="menuselection"><span class="accelerator">F</span>oo ' + '-&- <span class="accelerator">B</span>ar</span></p>'), + ('\\sphinxAtStartPar\n' + r'\sphinxmenuselection{\sphinxaccelerator{F}oo \sphinxhyphen{}' + r'\&\sphinxhyphen{} \sphinxaccelerator{B}ar}'), + ), + ( + # interpolation of ampersands in guilabel + 'verify', + ':guilabel:`&Foo -&&- &Bar`', + ('<p><span class="guilabel"><span class="accelerator">F</span>oo ' + '-&- <span class="accelerator">B</span>ar</span></p>'), + ('\\sphinxAtStartPar\n' + r'\sphinxguilabel{\sphinxaccelerator{F}oo \sphinxhyphen{}\&\sphinxhyphen{} \sphinxaccelerator{B}ar}'), + ), + ( + # no ampersands in guilabel + 'verify', + ':guilabel:`Foo`', + '<p><span class="guilabel">Foo</span></p>', + '\\sphinxAtStartPar\n\\sphinxguilabel{Foo}', + ), + ( + # kbd role + 'verify', + ':kbd:`space`', + '<p><kbd class="kbd docutils literal notranslate">space</kbd></p>', + '\\sphinxAtStartPar\n\\sphinxkeyboard{\\sphinxupquote{space}}', + ), + ( + # kbd role + 'verify', + ':kbd:`Control+X`', + ('<p><kbd class="kbd compound docutils literal notranslate">' + '<kbd class="kbd docutils literal notranslate">Control</kbd>' + '+' + '<kbd class="kbd docutils literal notranslate">X</kbd>' + '</kbd></p>'), + '\\sphinxAtStartPar\n\\sphinxkeyboard{\\sphinxupquote{Control+X}}', + ), + 
( + # kbd role + 'verify', + ':kbd:`Alt+^`', + ('<p><kbd class="kbd compound docutils literal notranslate">' + '<kbd class="kbd docutils literal notranslate">Alt</kbd>' + '+' + '<kbd class="kbd docutils literal notranslate">^</kbd>' + '</kbd></p>'), + ('\\sphinxAtStartPar\n' + '\\sphinxkeyboard{\\sphinxupquote{Alt+\\textasciicircum{}}}'), + ), + ( + # kbd role + 'verify', + ':kbd:`M-x M-s`', + ('<p><kbd class="kbd compound docutils literal notranslate">' + '<kbd class="kbd docutils literal notranslate">M</kbd>' + '-' + '<kbd class="kbd docutils literal notranslate">x</kbd>' + ' ' + '<kbd class="kbd docutils literal notranslate">M</kbd>' + '-' + '<kbd class="kbd docutils literal notranslate">s</kbd>' + '</kbd></p>'), + ('\\sphinxAtStartPar\n' + '\\sphinxkeyboard{\\sphinxupquote{M\\sphinxhyphen{}x M\\sphinxhyphen{}s}}'), + ), + ( + # kbd role + 'verify', + ':kbd:`-`', + '<p><kbd class="kbd docutils literal notranslate">-</kbd></p>', + ('\\sphinxAtStartPar\n' + '\\sphinxkeyboard{\\sphinxupquote{\\sphinxhyphen{}}}'), + ), + ( + # kbd role + 'verify', + ':kbd:`Caps Lock`', + '<p><kbd class="kbd docutils literal notranslate">Caps Lock</kbd></p>', + ('\\sphinxAtStartPar\n' + '\\sphinxkeyboard{\\sphinxupquote{Caps Lock}}'), + ), + ( + # kbd role + 'verify', + ':kbd:`sys rq`', + '<p><kbd class="kbd docutils literal notranslate">sys rq</kbd></p>', + ('\\sphinxAtStartPar\n' + '\\sphinxkeyboard{\\sphinxupquote{sys rq}}'), + ), + ( + # non-interpolation of dashes in option role + 'verify_re', + ':option:`--with-option`', + ('<p><code( class="xref std std-option docutils literal notranslate")?>' + '<span class="pre">--with-option</span></code></p>$'), + (r'\\sphinxAtStartPar\n' + r'\\sphinxcode{\\sphinxupquote{\\sphinxhyphen{}\\sphinxhyphen{}with\\sphinxhyphen{}option}}$'), + ), + ( + # verify smarty-pants quotes + 'verify', + '"John"', + '<p>“John”</p>', + "\\sphinxAtStartPar\n“John”", + ), + ( + # ... but not in literal text + 'verify', + '``"John"``', + ('<p><code class="docutils literal notranslate"><span class="pre">' + '"John"</span></code></p>'), + '\\sphinxAtStartPar\n\\sphinxcode{\\sphinxupquote{"John"}}', + ), + ( + # verify classes for inline roles + 'verify', + ':manpage:`mp(1)`', + '<p><em class="manpage">mp(1)</em></p>', + '\\sphinxAtStartPar\n\\sphinxstyleliteralemphasis{\\sphinxupquote{mp(1)}}', + ), + ( + # correct escaping in normal mode + 'verify', + 'Γ\\\\∞$', + None, + '\\sphinxAtStartPar\nΓ\\textbackslash{}\\(\\infty\\)\\$', + ), + ( + # in verbatim code fragments + 'verify', + '::\n\n @Γ\\∞${}', + None, + ('\\begin{sphinxVerbatim}[commandchars=\\\\\\{\\}]\n' + '@Γ\\PYGZbs{}\\(\\infty\\)\\PYGZdl{}\\PYGZob{}\\PYGZcb{}\n' + '\\end{sphinxVerbatim}'), + ), + ( + # in URIs + 'verify_re', + '`test <https://www.google.com/~me/>`_', + None, + r'\\sphinxAtStartPar\n\\sphinxhref{https://www.google.com/~me/}{test}.*', + ), + ( + # description list: simple + 'verify', + 'term\n description', + '<dl class="simple">\n<dt>term</dt><dd><p>description</p>\n</dd>\n</dl>', + None, + ), + ( + # description list: with classifiers + 'verify', + 'term : class1 : class2\n description', + ('<dl class="simple">\n<dt>term<span class="classifier">class1</span>' + '<span class="classifier">class2</span></dt><dd><p>description</p>\n</dd>\n</dl>'), + None, + ), + ( + # glossary (description list): multiple terms + 'verify', + '.. 
glossary::\n\n term1\n term2\n description', + ('<dl class="simple glossary">\n' + '<dt id="term-term1">term1<a class="headerlink" href="#term-term1"' + ' title="Link to this term">¶</a></dt>' + '<dt id="term-term2">term2<a class="headerlink" href="#term-term2"' + ' title="Link to this term">¶</a></dt>' + '<dd><p>description</p>\n</dd>\n</dl>'), + None, + ), +]) +def test_inline(get_verifier, type, rst, html_expected, latex_expected): + verifier = get_verifier(type) + verifier(rst, html_expected, latex_expected) + + +@pytest.mark.parametrize(('type', 'rst', 'html_expected', 'latex_expected'), [ + ( + 'verify', + r'4 backslashes \\\\', + r'<p>4 backslashes \\</p>', + None, + ), +]) +def test_inline_docutils16(get_verifier, type, rst, html_expected, latex_expected): + verifier = get_verifier(type) + verifier(rst, html_expected, latex_expected) + + +@pytest.mark.sphinx(confoverrides={'latex_engine': 'xelatex'}) +@pytest.mark.parametrize(('type', 'rst', 'html_expected', 'latex_expected'), [ + ( + # in verbatim code fragments + 'verify', + '::\n\n @Γ\\∞${}', + None, + ('\\begin{sphinxVerbatim}[commandchars=\\\\\\{\\}]\n' + '@Γ\\PYGZbs{}∞\\PYGZdl{}\\PYGZob{}\\PYGZcb{}\n' + '\\end{sphinxVerbatim}'), + ), +]) +def test_inline_for_unicode_latex_engine(get_verifier, type, rst, + html_expected, latex_expected): + verifier = get_verifier(type) + verifier(rst, html_expected, latex_expected) + + +def test_samp_role(parse): + # no braces + text = ':samp:`a{b}c`' + doctree = parse(text) + assert_node(doctree[0], [nodes.paragraph, nodes.literal, ("a", + [nodes.emphasis, "b"], + "c")]) + # nested braces + text = ':samp:`a{{b}}c`' + doctree = parse(text) + assert_node(doctree[0], [nodes.paragraph, nodes.literal, ("a", + [nodes.emphasis, "{b"], + "}c")]) + + # half-opened braces + text = ':samp:`a{bc`' + doctree = parse(text) + assert_node(doctree[0], [nodes.paragraph, nodes.literal, "a{bc"]) + + # escaped braces + text = ':samp:`a\\\\{b}c`' + doctree = parse(text) + assert_node(doctree[0], [nodes.paragraph, nodes.literal, "a{b}c"]) + + # no braces (whitespaces are keeped as is) + text = ':samp:`code sample`' + doctree = parse(text) + assert_node(doctree[0], [nodes.paragraph, nodes.literal, "code sample"]) + + +def test_download_role(parse): + # implicit + text = ':download:`sphinx.rst`' + doctree = parse(text) + assert_node(doctree[0], [nodes.paragraph, addnodes.download_reference, + nodes.literal, "sphinx.rst"]) + assert_node(doctree[0][0], refdoc='dummy', refdomain='', reftype='download', + refexplicit=False, reftarget='sphinx.rst', refwarn=False) + assert_node(doctree[0][0][0], classes=['xref', 'download']) + + # explicit + text = ':download:`reftitle <sphinx.rst>`' + doctree = parse(text) + assert_node(doctree[0], [nodes.paragraph, addnodes.download_reference, + nodes.literal, "reftitle"]) + assert_node(doctree[0][0], refdoc='dummy', refdomain='', reftype='download', + refexplicit=True, reftarget='sphinx.rst', refwarn=False) + assert_node(doctree[0][0][0], classes=['xref', 'download']) + + +def test_XRefRole(inliner): + role = XRefRole() + + # implicit + doctrees, errors = role('ref', 'rawtext', 'text', 5, inliner, {}, []) + assert len(doctrees) == 1 + assert_node(doctrees[0], [addnodes.pending_xref, nodes.literal, 'text']) + assert_node(doctrees[0], refdoc='dummy', refdomain='', reftype='ref', reftarget='text', + refexplicit=False, refwarn=False) + assert errors == [] + + # explicit + doctrees, errors = role('ref', 'rawtext', 'title <target>', 5, inliner, {}, []) + assert_node(doctrees[0], 
[addnodes.pending_xref, nodes.literal, 'title']) + assert_node(doctrees[0], refdoc='dummy', refdomain='', reftype='ref', reftarget='target', + refexplicit=True, refwarn=False) + + # bang + doctrees, errors = role('ref', 'rawtext', '!title <target>', 5, inliner, {}, []) + assert_node(doctrees[0], [nodes.literal, 'title <target>']) + + # refdomain + doctrees, errors = role('test:doc', 'rawtext', 'text', 5, inliner, {}, []) + assert_node(doctrees[0], [addnodes.pending_xref, nodes.literal, 'text']) + assert_node(doctrees[0], refdoc='dummy', refdomain='test', reftype='doc', reftarget='text', + refexplicit=False, refwarn=False) + + # fix_parens + role = XRefRole(fix_parens=True) + doctrees, errors = role('ref', 'rawtext', 'text()', 5, inliner, {}, []) + assert_node(doctrees[0], [addnodes.pending_xref, nodes.literal, 'text()']) + assert_node(doctrees[0], refdoc='dummy', refdomain='', reftype='ref', reftarget='text', + refexplicit=False, refwarn=False) + + # lowercase + role = XRefRole(lowercase=True) + doctrees, errors = role('ref', 'rawtext', 'TEXT', 5, inliner, {}, []) + assert_node(doctrees[0], [addnodes.pending_xref, nodes.literal, 'TEXT']) + assert_node(doctrees[0], refdoc='dummy', refdomain='', reftype='ref', reftarget='text', + refexplicit=False, refwarn=False) + + +@pytest.mark.sphinx('dummy', testroot='prolog') +def test_rst_prolog(app, status, warning): + app.builder.build_all() + rst = app.env.get_doctree('restructuredtext') + md = app.env.get_doctree('markdown') + + # rst_prolog + assert_node(rst[0], nodes.paragraph) + assert_node(rst[0][0], nodes.emphasis) + assert_node(rst[0][0][0], nodes.Text) + assert rst[0][0][0] == 'Hello world' + + # rst_epilog + assert_node(rst[-1], nodes.section) + assert_node(rst[-1][-1], nodes.paragraph) + assert_node(rst[-1][-1][0], nodes.emphasis) + assert_node(rst[-1][-1][0][0], nodes.Text) + assert rst[-1][-1][0][0] == 'Good-bye world' + + # rst_prolog & rst_epilog on exlucding reST parser + assert not md.rawsource.startswith('*Hello world*.') + assert not md.rawsource.endswith('*Good-bye world*.\n') + + +@pytest.mark.sphinx('dummy', testroot='keep_warnings') +def test_keep_warnings_is_True(app, status, warning): + app.builder.build_all() + doctree = app.env.get_doctree('index') + assert_node(doctree[0], nodes.section) + assert len(doctree[0]) == 2 + assert_node(doctree[0][1], nodes.system_message) + + +@pytest.mark.sphinx('dummy', testroot='keep_warnings', + confoverrides={'keep_warnings': False}) +def test_keep_warnings_is_False(app, status, warning): + app.builder.build_all() + doctree = app.env.get_doctree('index') + assert_node(doctree[0], nodes.section) + assert len(doctree[0]) == 1 + + +@pytest.mark.sphinx('dummy', testroot='refonly_bullet_list') +def test_compact_refonly_bullet_list(app, status, warning): + app.builder.build_all() + doctree = app.env.get_doctree('index') + assert_node(doctree[0], nodes.section) + assert len(doctree[0]) == 5 + + assert doctree[0][1].astext() == 'List A:' + assert_node(doctree[0][2], nodes.bullet_list) + assert_node(doctree[0][2][0][0], addnodes.compact_paragraph) + assert doctree[0][2][0][0].astext() == 'genindex' + + assert doctree[0][3].astext() == 'List B:' + assert_node(doctree[0][4], nodes.bullet_list) + assert_node(doctree[0][4][0][0], nodes.paragraph) + assert doctree[0][4][0][0].astext() == 'Hello' + + +@pytest.mark.sphinx('dummy', testroot='default_role') +def test_default_role1(app, status, warning): + app.builder.build_all() + + # default-role: pep + doctree = app.env.get_doctree('index') + 
assert_node(doctree[0], nodes.section) + assert_node(doctree[0][1], nodes.paragraph) + assert_node(doctree[0][1][0], addnodes.index) + assert_node(doctree[0][1][1], nodes.target) + assert_node(doctree[0][1][2], nodes.reference, classes=["pep"]) + + # no default-role + doctree = app.env.get_doctree('foo') + assert_node(doctree[0], nodes.section) + assert_node(doctree[0][1], nodes.paragraph) + assert_node(doctree[0][1][0], nodes.title_reference) + assert_node(doctree[0][1][1], nodes.Text) + + +@pytest.mark.sphinx('dummy', testroot='default_role', + confoverrides={'default_role': 'guilabel'}) +def test_default_role2(app, status, warning): + app.builder.build_all() + + # default-role directive is stronger than configratuion + doctree = app.env.get_doctree('index') + assert_node(doctree[0], nodes.section) + assert_node(doctree[0][1], nodes.paragraph) + assert_node(doctree[0][1][0], addnodes.index) + assert_node(doctree[0][1][1], nodes.target) + assert_node(doctree[0][1][2], nodes.reference, classes=["pep"]) + + # default_role changes the default behavior + doctree = app.env.get_doctree('foo') + assert_node(doctree[0], nodes.section) + assert_node(doctree[0][1], nodes.paragraph) + assert_node(doctree[0][1][0], nodes.inline, classes=["guilabel"]) + assert_node(doctree[0][1][1], nodes.Text) diff --git a/tests/test_metadata.py b/tests/test_metadata.py new file mode 100644 index 0000000..7f31997 --- /dev/null +++ b/tests/test_metadata.py @@ -0,0 +1,43 @@ +"""Test our handling of metadata in files with bibliographic metadata.""" + +# adapted from an example of bibliographic metadata at +# https://docutils.sourceforge.io/docs/user/rst/demo.txt + +import pytest + + +@pytest.mark.sphinx('dummy', testroot='metadata') +def test_docinfo(app, status, warning): + """ + Inspect the 'docinfo' metadata stored in the first node of the document. + Note this doesn't give us access to data stored in subsequence blocks + that might be considered document metadata, such as 'abstract' or + 'dedication' blocks, or the 'meta' role. Doing otherwise is probably more + messing with the internals of sphinx than this rare use case merits. + """ + app.build() + expecteddocinfo = { + 'author': 'David Goodger', + 'authors': ['Me', 'Myself', 'I'], + 'address': '123 Example Street\nExample, EX Canada\nA1B 2C3', + 'field name': 'This is a generic bibliographic field.', + 'field name 2': ('Generic bibliographic fields may contain multiple ' + 'body elements.\n\nLike this.'), + 'status': 'This is a “work in progress”', + 'version': '1', + 'copyright': ('This document has been placed in the public domain. ' + 'You\nmay do with it as you wish. 
You may copy, modify,' + '\nredistribute, reattribute, sell, buy, rent, lease,\n' + 'destroy, or improve it, quote it at length, excerpt,\n' + 'incorporate, collate, fold, staple, or mutilate it, or ' + 'do\nanything else to it that your or anyone else’s ' + 'heart\ndesires.'), + 'contact': 'goodger@python.org', + 'date': '2006-05-21', + 'organization': 'humankind', + 'revision': '4564', + 'tocdepth': 1, + 'orphan': '', + 'nocomments': '', + } + assert app.env.metadata['index'] == expecteddocinfo diff --git a/tests/test_parser.py b/tests/test_parser.py new file mode 100644 index 0000000..86163c6 --- /dev/null +++ b/tests/test_parser.py @@ -0,0 +1,57 @@ +"""Tests parsers module.""" + +from unittest.mock import Mock, patch + +import pytest + +from sphinx.parsers import RSTParser +from sphinx.util.docutils import new_document + + +@pytest.mark.sphinx(testroot='basic') +@patch('docutils.parsers.rst.states.RSTStateMachine') +def test_RSTParser_prolog_epilog(RSTStateMachine, app): + document = new_document('dummy.rst') + document.settings = Mock(tab_width=8, language_code='') + parser = RSTParser() + parser.set_application(app) + + # normal case + text = ('hello Sphinx world\n' + 'Sphinx is a document generator') + parser.parse(text, document) + (content, _), _ = RSTStateMachine().run.call_args + + assert list(content.xitems()) == [('dummy.rst', 0, 'hello Sphinx world'), + ('dummy.rst', 1, 'Sphinx is a document generator')] + + # with rst_prolog + app.env.config.rst_prolog = 'this is rst_prolog\nhello reST!' + parser.parse(text, document) + (content, _), _ = RSTStateMachine().run.call_args + assert list(content.xitems()) == [('<rst_prolog>', 0, 'this is rst_prolog'), + ('<rst_prolog>', 1, 'hello reST!'), + ('<generated>', 0, ''), + ('dummy.rst', 0, 'hello Sphinx world'), + ('dummy.rst', 1, 'Sphinx is a document generator')] + + # with rst_epilog + app.env.config.rst_prolog = None + app.env.config.rst_epilog = 'this is rst_epilog\ngood-bye reST!' 
+ parser.parse(text, document) + (content, _), _ = RSTStateMachine().run.call_args + assert list(content.xitems()) == [('dummy.rst', 0, 'hello Sphinx world'), + ('dummy.rst', 1, 'Sphinx is a document generator'), + ('dummy.rst', 2, ''), + ('<rst_epilog>', 0, 'this is rst_epilog'), + ('<rst_epilog>', 1, 'good-bye reST!')] + + # expandtabs / convert whitespaces + app.env.config.rst_prolog = None + app.env.config.rst_epilog = None + text = ('\thello Sphinx world\n' + '\v\fSphinx is a document generator') + parser.parse(text, document) + (content, _), _ = RSTStateMachine().run.call_args + assert list(content.xitems()) == [('dummy.rst', 0, ' hello Sphinx world'), + ('dummy.rst', 1, ' Sphinx is a document generator')] diff --git a/tests/test_project.py b/tests/test_project.py new file mode 100644 index 0000000..45ae7c8 --- /dev/null +++ b/tests/test_project.py @@ -0,0 +1,78 @@ +"""Tests project module.""" + +import pytest + +from sphinx.project import Project + +DOCNAMES = {'autodoc', 'bom', 'extapi', 'extensions', 'footnote', 'images', + 'includes', 'index', 'lists', 'markup', 'math', 'objects', + 'subdir/excluded', 'subdir/images', 'subdir/includes'} +SUBDIR_DOCNAMES = {'subdir/excluded', 'subdir/images', 'subdir/includes'} + + +def test_project_discover_basic(rootdir): + # basic case + project = Project(rootdir / 'test-root', ['.txt']) + assert project.discover() == DOCNAMES + + +def test_project_discover_exclude_patterns(rootdir): + project = Project(rootdir / 'test-root', ['.txt']) + + # exclude_paths option + assert project.discover(['subdir/*']) == DOCNAMES - SUBDIR_DOCNAMES + assert project.discover(['.txt', 'subdir/*']) == DOCNAMES - SUBDIR_DOCNAMES + + +def test_project_discover_multiple_suffixes(rootdir): + # multiple source_suffixes + project = Project(rootdir / 'test-root', ['.txt', '.foo']) + assert project.discover() == DOCNAMES | {'otherext'} + + +def test_project_discover_complicated_suffix(rootdir): + # complicated source_suffix + project = Project(rootdir / 'test-root', ['.foo.png']) + assert project.discover() == {'img'} + + +def test_project_discover_templates_path(rootdir): + # templates_path + project = Project(rootdir / 'test-root', ['.html']) + assert project.discover() == {'_templates/layout', + '_templates/customsb', + '_templates/contentssb'} + + assert project.discover(['_templates']) == set() + + +def test_project_path2doc(rootdir): + project = Project(rootdir / 'test-basic', {'.rst': 'restructuredtext'}) + assert project.path2doc('index.rst') == 'index' + assert project.path2doc('index.foo') is None # unknown extension + assert project.path2doc('index.foo.rst') == 'index.foo' + assert project.path2doc('index') is None + assert project.path2doc('path/to/index.rst') == 'path/to/index' + assert project.path2doc(rootdir / 'test-basic' / 'to/index.rst') == 'to/index' + + +@pytest.mark.sphinx(srcdir='project_doc2path', testroot='basic') +def test_project_doc2path(app): + source_suffix = {'.rst': 'restructuredtext', '.txt': 'restructuredtext'} + + project = Project(app.srcdir, source_suffix) + project.discover() + + # absolute path + assert project.doc2path('index', absolute=True) == str(app.srcdir / 'index.rst') + + # relative path + assert project.doc2path('index', absolute=False) == 'index.rst' + + # first source_suffix is used for missing file + assert project.doc2path('foo', absolute=False) == 'foo.rst' + + # matched source_suffix is used if exists + (app.srcdir / 'bar.txt').touch() + project.discover() + assert project.doc2path('bar', absolute=False) == 'bar.txt' 
diff --git a/tests/test_pycode.py b/tests/test_pycode.py new file mode 100644 index 0000000..5739787 --- /dev/null +++ b/tests/test_pycode.py @@ -0,0 +1,173 @@ +"""Test pycode.""" + +import os +import sys + +import pytest + +import sphinx +from sphinx.errors import PycodeError +from sphinx.pycode import ModuleAnalyzer + +SPHINX_MODULE_PATH = os.path.splitext(sphinx.__file__)[0] + '.py' + + +def test_ModuleAnalyzer_get_module_source(): + assert ModuleAnalyzer.get_module_source('sphinx') == (sphinx.__file__, sphinx.__loader__.get_source('sphinx')) + + # failed to obtain source information from builtin modules + with pytest.raises(PycodeError): + ModuleAnalyzer.get_module_source('builtins') + with pytest.raises(PycodeError): + ModuleAnalyzer.get_module_source('itertools') + + +def test_ModuleAnalyzer_for_string(): + analyzer = ModuleAnalyzer.for_string('print("Hello world")', 'module_name') + assert analyzer.modname == 'module_name' + assert analyzer.srcname == '<string>' + + +def test_ModuleAnalyzer_for_file(): + analyzer = ModuleAnalyzer.for_string(SPHINX_MODULE_PATH, 'sphinx') + assert analyzer.modname == 'sphinx' + assert analyzer.srcname == '<string>' + + +def test_ModuleAnalyzer_for_module(rootdir): + analyzer = ModuleAnalyzer.for_module('sphinx') + assert analyzer.modname == 'sphinx' + assert analyzer.srcname in (SPHINX_MODULE_PATH, + os.path.abspath(SPHINX_MODULE_PATH)) + + path = str(rootdir / 'test-pycode') + sys.path.insert(0, path) + try: + analyzer = ModuleAnalyzer.for_module('cp_1251_coded') + docs = analyzer.find_attr_docs() + assert docs == {('', 'X'): ['It MUST look like X="\u0425"', '']} + finally: + sys.path.pop(0) + + +def test_ModuleAnalyzer_find_tags(): + code = ('class Foo(object):\n' # line: 1 + ' """class Foo!"""\n' + ' def __init__(self):\n' + ' pass\n' + '\n' + ' def bar(self, arg1, arg2=True, *args, **kwargs):\n' + ' """method Foo.bar"""\n' + ' pass\n' + '\n' + ' class Baz(object):\n' + ' def __init__(self):\n' # line: 11 + ' pass\n' + '\n' + 'def qux():\n' + ' """function baz"""\n' + ' pass\n' + '\n' + '@decorator1\n' + '@decorator2\n' + 'def quux():\n' + ' pass\n' # line: 21 + '\n' + 'class Corge(object):\n' + ' @decorator1\n' + ' @decorator2\n' + ' def grault(self):\n' + ' pass\n') + analyzer = ModuleAnalyzer.for_string(code, 'module') + tags = analyzer.find_tags() + assert set(tags.keys()) == {'Foo', 'Foo.__init__', 'Foo.bar', + 'Foo.Baz', 'Foo.Baz.__init__', 'qux', 'quux', + 'Corge', 'Corge.grault'} + assert tags['Foo'] == ('class', 1, 12) # type, start, end + assert tags['Foo.__init__'] == ('def', 3, 4) + assert tags['Foo.bar'] == ('def', 6, 8) + assert tags['Foo.Baz'] == ('class', 10, 12) + assert tags['Foo.Baz.__init__'] == ('def', 11, 12) + assert tags['qux'] == ('def', 14, 16) + assert tags['quux'] == ('def', 18, 21) + assert tags['Corge'] == ('class', 23, 27) + assert tags['Corge.grault'] == ('def', 24, 27) + + +def test_ModuleAnalyzer_find_attr_docs(): + code = ('class Foo(object):\n' + ' """class Foo!"""\n' + ' #: comment before attr1\n' + ' attr1 = None\n' + ' attr2 = None # attribute comment for attr2 (without colon)\n' + ' attr3 = None #: attribute comment for attr3\n' + ' attr4 = None #: long attribute comment\n' + ' #: for attr4\n' + ' #: comment before attr5\n' + ' attr5 = None #: attribute comment for attr5\n' + ' attr6, attr7 = 1, 2 #: this comment is ignored\n' + '\n' + ' def __init__(self):\n' + ' self.attr8 = None #: first attribute comment (ignored)\n' + ' self.attr8 = None #: attribute comment for attr8\n' + ' #: comment before attr9\n' + 
' self.attr9 = None #: comment after attr9\n' + ' "string after attr9"\n' + '\n' + ' def bar(self, arg1, arg2=True, *args, **kwargs):\n' + ' """method Foo.bar"""\n' + ' pass\n' + '\n' + 'def baz():\n' + ' """function baz"""\n' + ' pass\n' + '\n' + 'class Qux: attr1 = 1; attr2 = 2') + analyzer = ModuleAnalyzer.for_string(code, 'module') + docs = analyzer.find_attr_docs() + assert set(docs) == {('Foo', 'attr1'), + ('Foo', 'attr3'), + ('Foo', 'attr4'), + ('Foo', 'attr5'), + ('Foo', 'attr6'), + ('Foo', 'attr7'), + ('Foo', 'attr8'), + ('Foo', 'attr9')} + assert docs[('Foo', 'attr1')] == ['comment before attr1', ''] + assert docs[('Foo', 'attr3')] == ['attribute comment for attr3', ''] + assert docs[('Foo', 'attr4')] == ['long attribute comment', ''] + assert docs[('Foo', 'attr4')] == ['long attribute comment', ''] + assert docs[('Foo', 'attr5')] == ['attribute comment for attr5', ''] + assert docs[('Foo', 'attr6')] == ['this comment is ignored', ''] + assert docs[('Foo', 'attr7')] == ['this comment is ignored', ''] + assert docs[('Foo', 'attr8')] == ['attribute comment for attr8', ''] + assert docs[('Foo', 'attr9')] == ['string after attr9', ''] + assert analyzer.tagorder == {'Foo': 0, + 'Foo.__init__': 8, + 'Foo.attr1': 1, + 'Foo.attr2': 2, + 'Foo.attr3': 3, + 'Foo.attr4': 4, + 'Foo.attr5': 5, + 'Foo.attr6': 6, + 'Foo.attr7': 7, + 'Foo.attr8': 10, + 'Foo.attr9': 12, + 'Foo.bar': 13, + 'baz': 14, + 'Qux': 15, + 'Qux.attr1': 16, + 'Qux.attr2': 17} + + +def test_ModuleAnalyzer_find_attr_docs_for_posonlyargs_method(): + code = ('class Foo(object):\n' + ' def __init__(self, /):\n' + ' self.attr = None #: attribute comment\n') + analyzer = ModuleAnalyzer.for_string(code, 'module') + docs = analyzer.find_attr_docs() + assert set(docs) == {('Foo', 'attr')} + assert docs[('Foo', 'attr')] == ['attribute comment', ''] + assert analyzer.tagorder == {'Foo': 0, + 'Foo.__init__': 1, + 'Foo.attr': 2} diff --git a/tests/test_pycode_ast.py b/tests/test_pycode_ast.py new file mode 100644 index 0000000..5efd0cb --- /dev/null +++ b/tests/test_pycode_ast.py @@ -0,0 +1,62 @@ +"""Test pycode.ast""" + +import ast + +import pytest + +from sphinx.pycode.ast import unparse as ast_unparse + + +@pytest.mark.parametrize(('source', 'expected'), [ + ("a + b", "a + b"), # Add + ("a and b", "a and b"), # And + ("os.path", "os.path"), # Attribute + ("1 * 2", "1 * 2"), # BinOp + ("a & b", "a & b"), # BitAnd + ("a | b", "a | b"), # BitOr + ("a ^ b", "a ^ b"), # BitXor + ("a and b and c", "a and b and c"), # BoolOp + ("b'bytes'", "b'bytes'"), # Bytes + ("object()", "object()"), # Call + ("1234", "1234"), # Constant + ("{'key1': 'value1', 'key2': 'value2'}", + "{'key1': 'value1', 'key2': 'value2'}"), # Dict + ("a / b", "a / b"), # Div + ("...", "..."), # Ellipsis + ("a // b", "a // b"), # FloorDiv + ("Tuple[int, int]", "Tuple[int, int]"), # Index, Subscript + ("~1", "~1"), # Invert + ("lambda x, y: x + y", + "lambda x, y: ..."), # Lambda + ("[1, 2, 3]", "[1, 2, 3]"), # List + ("a << b", "a << b"), # LShift + ("a @ b", "a @ b"), # MatMult + ("a % b", "a % b"), # Mod + ("a * b", "a * b"), # Mult + ("sys", "sys"), # Name, NameConstant + ("1234", "1234"), # Num + ("not a", "not a"), # Not + ("a or b", "a or b"), # Or + ("a**b", "a**b"), # Pow + ("a >> b", "a >> b"), # RShift + ("{1, 2, 3}", "{1, 2, 3}"), # Set + ("a - b", "a - b"), # Sub + ("'str'", "'str'"), # Str + ("+a", "+a"), # UAdd + ("-1", "-1"), # UnaryOp + ("-a", "-a"), # USub + ("(1, 2, 3)", "(1, 2, 3)"), # Tuple + ("()", "()"), # Tuple (empty) + ("(1,)", "(1,)"), # Tuple 
(single item) + ("lambda x=0, /, y=1, *args, z, **kwargs: x + y + z", + "lambda x=0, /, y=1, *args, z, **kwargs: ..."), # posonlyargs + ("0x1234", "0x1234"), # Constant + ("1_000_000", "1_000_000"), # Constant +]) +def test_unparse(source, expected): + module = ast.parse(source) + assert ast_unparse(module.body[0].value, source) == expected + + +def test_unparse_None(): + assert ast_unparse(None) is None diff --git a/tests/test_pycode_parser.py b/tests/test_pycode_parser.py new file mode 100644 index 0000000..fde648d --- /dev/null +++ b/tests/test_pycode_parser.py @@ -0,0 +1,511 @@ +"""Test pycode.parser.""" + +from sphinx.pycode.parser import Parser +from sphinx.util.inspect import signature_from_str + + +def test_comment_picker_basic(): + source = ('a = 1 + 1 #: assignment\n' + 'b = 1 +\\\n 1 #: assignment including a CR\n' + 'c = (1 +\n 1) #: tuple \n' + 'd = {1, \n 1} #: set\n' + 'e = [1, \n 1] #: list #: additional comment\n' + 'f = "abc"\n' + '#: string; comment on next line (ignored)\n' + 'g = 1.0\n' + '"""float; string on next line"""\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('', 'a'): 'assignment', + ('', 'b'): 'assignment including a CR', + ('', 'c'): 'tuple ', + ('', 'd'): ' set', + ('', 'e'): 'list #: additional comment', + ('', 'g'): 'float; string on next line'} + + +def test_comment_picker_location(): + # multiple "before" comments + source = ('#: comment before assignment1\n' + '#:\n' + '#: comment before assignment2\n' + 'a = 1 + 1\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('', 'a'): ('comment before assignment1\n' + '\n' + 'comment before assignment2')} + + # before and after comments + source = ('#: comment before assignment\n' + 'a = 1 + 1 #: comment after assignment\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('', 'a'): 'comment after assignment'} + + # after comment and next line string + source = ('a = 1 + 1\n #: comment after assignment\n' + '"""string on next line"""\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('', 'a'): 'string on next line'} + + # before comment and next line string + source = ('#: comment before assignment\n' + 'a = 1 + 1\n' + '"""string on next line"""\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('', 'a'): 'string on next line'} + + # before comment, after comment and next line string + source = ('#: comment before assignment\n' + 'a = 1 + 1 #: comment after assignment\n' + '"""string on next line"""\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('', 'a'): 'string on next line'} + + # inside __init__ method + source = ('class Foo(object):\n' + ' def __init__(self):\n' + ' #: comment before assignment\n' + ' self.attr1 = None\n' + ' self.attr2 = None #: comment after assignment\n' + '\n' + ' #: comment for attr3(1)\n' + ' self.attr3 = None #: comment for attr3(2)\n' + ' """comment for attr3(3)"""\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('Foo', 'attr1'): 'comment before assignment', + ('Foo', 'attr2'): 'comment after assignment', + ('Foo', 'attr3'): 'comment for attr3(3)'} + + +def test_annotated_assignment(): + source = ('a: str = "Sphinx" #: comment\n' + 'b: int = 1\n' + '"""string on next line"""\n' + 'c: int #: comment\n' + 'd = 1 # type: int\n' + '"""string on next line"""\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('', 'a'): 'comment', + ('', 'b'): 'string on next line', + ('', 
'c'): 'comment', + ('', 'd'): 'string on next line'} + assert parser.annotations == {('', 'a'): 'str', + ('', 'b'): 'int', + ('', 'c'): 'int', + ('', 'd'): 'int'} + assert parser.definitions == {} + + +def test_complex_assignment(): + source = ('a = 1 + 1; b = a #: compound statement\n' + 'c, d = (1, 1) #: unpack assignment\n' + 'e = True #: first assignment\n' + 'e = False #: second assignment\n' + 'f = g = None #: multiple assignment at once\n' + '(theta, phi) = (0, 0.5) #: unpack assignment via tuple\n' + '[x, y] = (5, 6) #: unpack assignment via list\n' + 'h, *i, j = (1, 2, 3, 4) #: unpack assignment2\n' + 'k, *self.attr = (5, 6, 7) #: unpack assignment3\n' + 'l, *m[0] = (8, 9, 0) #: unpack assignment4\n' + ) + parser = Parser(source) + parser.parse() + assert parser.comments == {('', 'b'): 'compound statement', + ('', 'c'): 'unpack assignment', + ('', 'd'): 'unpack assignment', + ('', 'e'): 'second assignment', + ('', 'f'): 'multiple assignment at once', + ('', 'g'): 'multiple assignment at once', + ('', 'theta'): 'unpack assignment via tuple', + ('', 'phi'): 'unpack assignment via tuple', + ('', 'x'): 'unpack assignment via list', + ('', 'y'): 'unpack assignment via list', + ('', 'h'): 'unpack assignment2', + ('', 'i'): 'unpack assignment2', + ('', 'j'): 'unpack assignment2', + ('', 'k'): 'unpack assignment3', + ('', 'l'): 'unpack assignment4', + } + assert parser.definitions == {} + + +def test_assignment_in_try_clause(): + source = ('try:\n' + ' a = None #: comment\n' + 'except:\n' + ' b = None #: ignored\n' + 'else:\n' + ' c = None #: comment\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('', 'a'): 'comment', + ('', 'c'): 'comment'} + assert parser.deforders == {'a': 0, + 'c': 1} + + +def test_obj_assignment(): + source = ('obj = SomeObject() #: some object\n' + 'obj.attr = 1 #: attr1\n' + 'obj.attr.attr = 1 #: attr2\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('', 'obj'): 'some object'} + assert parser.definitions == {} + + +def test_container_assignment(): + source = ('l = [] #: list\n' + 'l[1] = True #: list assignment\n' + 'l[0:0] = [] #: list assignment\n' + 'l[_from:_to] = [] #: list assignment\n' + 'd = {} #: dict\n' + 'd["doc"] = 1 #: dict assignment\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('', 'l'): 'list', + ('', 'd'): 'dict'} + assert parser.definitions == {} + + +def test_function(): + source = ('def some_function():\n' + ' """docstring"""\n' + ' a = 1 + 1 #: comment1\n' + '\n' + ' b = a #: comment2\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {} + assert parser.definitions == {'some_function': ('def', 1, 5)} + assert parser.deforders == {'some_function': 0} + + +def test_nested_function(): + source = ('def some_function():\n' + ' a = 1 + 1 #: comment1\n' + '\n' + ' def inner_function():\n' + ' b = 1 + 1 #: comment2\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {} + assert parser.definitions == {'some_function': ('def', 1, 5)} + assert parser.deforders == {'some_function': 0} + + +def test_class(): + source = ('class Foo(object):\n' + ' attr1 = None #: comment1\n' + ' attr2 = None #: comment2\n' + '\n' + ' def __init__(self):\n' + ' self.a = 1 + 1 #: comment3\n' + ' self.attr2 = 1 + 1 #: overridden\n' + ' b = 1 + 1 #: comment5\n' + '\n' + ' def some_method(self):\n' + ' c = 1 + 1 #: comment6\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('Foo', 'attr1'): 'comment1', + ('Foo', 'a'): 
'comment3', + ('Foo', 'attr2'): 'overridden'} + assert parser.definitions == {'Foo': ('class', 1, 11), + 'Foo.__init__': ('def', 5, 8), + 'Foo.some_method': ('def', 10, 11)} + assert parser.deforders == {'Foo': 0, + 'Foo.attr1': 1, + 'Foo.__init__': 3, + 'Foo.a': 4, + 'Foo.attr2': 5, + 'Foo.some_method': 6} + + +def test_class_uses_non_self(): + source = ('class Foo(object):\n' + ' def __init__(this):\n' + ' this.a = 1 + 1 #: comment\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('Foo', 'a'): 'comment'} + assert parser.definitions == {'Foo': ('class', 1, 3), + 'Foo.__init__': ('def', 2, 3)} + assert parser.deforders == {'Foo': 0, + 'Foo.__init__': 1, + 'Foo.a': 2} + + +def test_nested_class(): + source = ('class Foo(object):\n' + ' attr1 = None #: comment1\n' + '\n' + ' class Bar(object):\n' + ' attr2 = None #: comment2\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('Foo', 'attr1'): 'comment1', + ('Foo.Bar', 'attr2'): 'comment2'} + assert parser.definitions == {'Foo': ('class', 1, 5), + 'Foo.Bar': ('class', 4, 5)} + assert parser.deforders == {'Foo': 0, + 'Foo.attr1': 1, + 'Foo.Bar': 2, + 'Foo.Bar.attr2': 3} + + +def test_class_comment(): + source = ('import logging\n' + 'logger = logging.getLogger(__name__)\n' + '\n' + 'class Foo(object):\n' + ' """Bar"""\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {} + assert parser.definitions == {'Foo': ('class', 4, 5)} + + +def test_comment_picker_multiline_string(): + source = ('class Foo(object):\n' + ' a = None\n' + ' """multiline\n' + ' docstring\n' + ' """\n' + ' b = None\n' + ' """\n' + ' docstring\n' + ' starts with::\n' + '\n' + ' empty line"""\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('Foo', 'a'): 'multiline\ndocstring', + ('Foo', 'b'): 'docstring\nstarts with::\n\n empty line'} + + +def test_decorators(): + source = ('@deco\n' + 'def func1(): pass\n' + '\n' + '@deco(param1, param2)\n' + 'def func2(): pass\n' + '\n' + '@deco1\n' + '@deco2\n' + 'def func3(): pass\n' + '\n' + '@deco\n' + 'class Foo():\n' + ' @deco1\n' + ' @deco2\n' + ' def method(self): pass\n') + parser = Parser(source) + parser.parse() + assert parser.definitions == {'func1': ('def', 1, 2), + 'func2': ('def', 4, 5), + 'func3': ('def', 7, 9), + 'Foo': ('class', 11, 15), + 'Foo.method': ('def', 13, 15)} + + +def test_async_function_and_method(): + source = ('async def some_function():\n' + ' """docstring"""\n' + ' a = 1 + 1 #: comment1\n' + '\n' + 'class Foo:\n' + ' async def method(self):\n' + ' pass\n') + parser = Parser(source) + parser.parse() + assert parser.definitions == {'some_function': ('def', 1, 3), + 'Foo': ('class', 5, 7), + 'Foo.method': ('def', 6, 7)} + + +def test_imports(): + source = ('import sys\n' + 'from os import environment, path\n' + '\n' + 'import sphinx as Sphinx\n' + 'from sphinx.application import Sphinx as App\n') + parser = Parser(source) + parser.parse() + assert parser.definitions == {} + assert parser.deforders == {'sys': 0, + 'environment': 1, + 'path': 2, + 'Sphinx': 3, + 'App': 4} + + +def test_formfeed_char(): + source = ('class Foo:\n' + '\f\n' + ' attr = 1234 #: comment\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {('Foo', 'attr'): 'comment'} + + +def test_typing_final(): + source = ('import typing\n' + '\n' + '@typing.final\n' + 'def func(): pass\n' + '\n' + '@typing.final\n' + 'class Foo:\n' + ' @typing.final\n' + ' def meth(self):\n' + ' pass\n') + parser = Parser(source) + 
parser.parse() + assert parser.finals == ['func', 'Foo', 'Foo.meth'] + + +def test_typing_final_from_import(): + source = ('from typing import final\n' + '\n' + '@final\n' + 'def func(): pass\n' + '\n' + '@final\n' + 'class Foo:\n' + ' @final\n' + ' def meth(self):\n' + ' pass\n') + parser = Parser(source) + parser.parse() + assert parser.finals == ['func', 'Foo', 'Foo.meth'] + + +def test_typing_final_import_as(): + source = ('import typing as foo\n' + '\n' + '@foo.final\n' + 'def func(): pass\n' + '\n' + '@foo.final\n' + 'class Foo:\n' + ' @typing.final\n' + ' def meth(self):\n' + ' pass\n') + parser = Parser(source) + parser.parse() + assert parser.finals == ['func', 'Foo'] + + +def test_typing_final_from_import_as(): + source = ('from typing import final as bar\n' + '\n' + '@bar\n' + 'def func(): pass\n' + '\n' + '@bar\n' + 'class Foo:\n' + ' @final\n' + ' def meth(self):\n' + ' pass\n') + parser = Parser(source) + parser.parse() + assert parser.finals == ['func', 'Foo'] + + +def test_typing_final_not_imported(): + source = ('@typing.final\n' + 'def func(): pass\n' + '\n' + '@typing.final\n' + 'class Foo:\n' + ' @final\n' + ' def meth(self):\n' + ' pass\n') + parser = Parser(source) + parser.parse() + assert parser.finals == [] + + +def test_typing_overload(): + source = ('import typing\n' + '\n' + '@typing.overload\n' + 'def func(x: int, y: int) -> int: pass\n' + '\n' + '@typing.overload\n' + 'def func(x: str, y: str) -> str: pass\n' + '\n' + 'def func(x, y): pass\n') + parser = Parser(source) + parser.parse() + assert parser.overloads == {'func': [signature_from_str('(x: int, y: int) -> int'), + signature_from_str('(x: str, y: str) -> str')]} + + +def test_typing_overload_from_import(): + source = ('from typing import overload\n' + '\n' + '@overload\n' + 'def func(x: int, y: int) -> int: pass\n' + '\n' + '@overload\n' + 'def func(x: str, y: str) -> str: pass\n' + '\n' + 'def func(x, y): pass\n') + parser = Parser(source) + parser.parse() + assert parser.overloads == {'func': [signature_from_str('(x: int, y: int) -> int'), + signature_from_str('(x: str, y: str) -> str')]} + + +def test_typing_overload_import_as(): + source = ('import typing as foo\n' + '\n' + '@foo.overload\n' + 'def func(x: int, y: int) -> int: pass\n' + '\n' + '@foo.overload\n' + 'def func(x: str, y: str) -> str: pass\n' + '\n' + 'def func(x, y): pass\n') + parser = Parser(source) + parser.parse() + assert parser.overloads == {'func': [signature_from_str('(x: int, y: int) -> int'), + signature_from_str('(x: str, y: str) -> str')]} + + +def test_typing_overload_from_import_as(): + source = ('from typing import overload as bar\n' + '\n' + '@bar\n' + 'def func(x: int, y: int) -> int: pass\n' + '\n' + '@bar\n' + 'def func(x: str, y: str) -> str: pass\n' + '\n' + 'def func(x, y): pass\n') + parser = Parser(source) + parser.parse() + assert parser.overloads == {'func': [signature_from_str('(x: int, y: int) -> int'), + signature_from_str('(x: str, y: str) -> str')]} + + +def test_typing_overload_not_imported(): + source = ('@typing.final\n' + 'def func(x: int, y: int) -> int: pass\n' + '\n' + '@typing.final\n' + 'def func(x: str, y: str) -> str: pass\n' + '\n' + 'def func(x, y): pass\n') + parser = Parser(source) + parser.parse() + assert parser.overloads == {} diff --git a/tests/test_quickstart.py b/tests/test_quickstart.py new file mode 100644 index 0000000..6a9f5c7 --- /dev/null +++ b/tests/test_quickstart.py @@ -0,0 +1,260 @@ +"""Test the sphinx.quickstart module.""" + +import time +from io import StringIO +from os 
import path + +import pytest + +from sphinx import application +from sphinx.cmd import quickstart as qs +from sphinx.util.console import coloron, nocolor + +warnfile = StringIO() + + +def setup_module(): + nocolor() + + +def mock_input(answers, needanswer=False): + called = set() + + def input_(prompt): + if prompt in called: + raise AssertionError('answer for %r missing and no default ' + 'present' % prompt) + called.add(prompt) + for question in answers: + if prompt.startswith(qs.PROMPT_PREFIX + question): + return answers[question] + if needanswer: + raise AssertionError('answer for %r missing' % prompt) + return '' + return input_ + + +real_input = input + + +def teardown_module(): + qs.term_input = real_input + coloron() + + +def test_do_prompt(): + answers = { + 'Q2': 'v2', + 'Q3': 'v3', + 'Q4': 'yes', + 'Q5': 'no', + 'Q6': 'foo', + } + qs.term_input = mock_input(answers) + + assert qs.do_prompt('Q1', default='v1') == 'v1' + assert qs.do_prompt('Q3', default='v3_default') == 'v3' + assert qs.do_prompt('Q2') == 'v2' + assert qs.do_prompt('Q4', validator=qs.boolean) is True + assert qs.do_prompt('Q5', validator=qs.boolean) is False + with pytest.raises(AssertionError): + qs.do_prompt('Q6', validator=qs.boolean) + + +def test_do_prompt_inputstrip(): + answers = { + 'Q1': 'Y', + 'Q2': ' Yes ', + 'Q3': 'N', + 'Q4': 'N ', + } + qs.term_input = mock_input(answers) + + assert qs.do_prompt('Q1') == 'Y' + assert qs.do_prompt('Q2') == 'Yes' + assert qs.do_prompt('Q3') == 'N' + assert qs.do_prompt('Q4') == 'N' + + +def test_do_prompt_with_nonascii(): + answers = { + 'Q1': '\u30c9\u30a4\u30c4', + } + qs.term_input = mock_input(answers) + result = qs.do_prompt('Q1', default='\u65e5\u672c') + assert result == '\u30c9\u30a4\u30c4' + + +def test_quickstart_defaults(tmp_path): + answers = { + 'Root path': str(tmp_path), + 'Project name': 'Sphinx Test', + 'Author name': 'Georg Brandl', + 'Project version': '0.1', + } + qs.term_input = mock_input(answers) + d = {} + qs.ask_user(d) + qs.generate(d) + + conffile = tmp_path / 'conf.py' + assert conffile.is_file() + ns = {} + exec(conffile.read_text(encoding='utf8'), ns) # NoQA: S102 + assert ns['extensions'] == [] + assert ns['templates_path'] == ['_templates'] + assert ns['project'] == 'Sphinx Test' + assert ns['copyright'] == '%s, Georg Brandl' % time.strftime('%Y') + assert ns['version'] == '0.1' + assert ns['release'] == '0.1' + assert ns['html_static_path'] == ['_static'] + + assert (tmp_path / '_static').is_dir() + assert (tmp_path / '_templates').is_dir() + assert (tmp_path / 'index.rst').is_file() + assert (tmp_path / 'Makefile').is_file() + assert (tmp_path / 'make.bat').is_file() + + +def test_quickstart_all_answers(tmp_path): + answers = { + 'Root path': str(tmp_path), + 'Separate source and build': 'y', + 'Name prefix for templates': '.', + 'Project name': 'STASI™', + 'Author name': "Wolfgang Schäuble & G'Beckstein", + 'Project version': '2.0', + 'Project release': '2.0.1', + 'Project language': 'de', + 'Source file suffix': '.txt', + 'Name of your master document': 'contents', + 'autodoc': 'y', + 'doctest': 'yes', + 'intersphinx': 'no', + 'todo': 'y', + 'coverage': 'no', + 'imgmath': 'N', + 'mathjax': 'no', + 'ifconfig': 'no', + 'viewcode': 'no', + 'githubpages': 'no', + 'Create Makefile': 'no', + 'Create Windows command file': 'no', + 'Do you want to use the epub builder': 'yes', + } + qs.term_input = mock_input(answers, needanswer=True) + d = {} + qs.ask_user(d) + qs.generate(d) + + conffile = tmp_path / 'source' / 'conf.py' + assert 
conffile.is_file() + ns = {} + exec(conffile.read_text(encoding='utf8'), ns) # NoQA: S102 + assert ns['extensions'] == [ + 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', + ] + assert ns['templates_path'] == ['.templates'] + assert ns['source_suffix'] == '.txt' + assert ns['root_doc'] == 'contents' + assert ns['project'] == 'STASI™' + assert ns['copyright'] == "%s, Wolfgang Schäuble & G'Beckstein" % \ + time.strftime('%Y') + assert ns['version'] == '2.0' + assert ns['release'] == '2.0.1' + assert ns['todo_include_todos'] is True + assert ns['html_static_path'] == ['.static'] + + assert (tmp_path / 'build').is_dir() + assert (tmp_path / 'source' / '.static').is_dir() + assert (tmp_path / 'source' / '.templates').is_dir() + assert (tmp_path / 'source' / 'contents.txt').is_file() + + +def test_generated_files_eol(tmp_path): + answers = { + 'Root path': str(tmp_path), + 'Project name': 'Sphinx Test', + 'Author name': 'Georg Brandl', + 'Project version': '0.1', + } + qs.term_input = mock_input(answers) + d = {} + qs.ask_user(d) + qs.generate(d) + + def assert_eol(filename, eol): + content = filename.read_bytes().decode() + assert all(l[-len(eol):] == eol for l in content.splitlines(keepends=True)) + + assert_eol(tmp_path / 'make.bat', '\r\n') + assert_eol(tmp_path / 'Makefile', '\n') + + +def test_quickstart_and_build(tmp_path): + answers = { + 'Root path': str(tmp_path), + 'Project name': 'Fullwidth characters: \u30c9\u30a4\u30c4', + 'Author name': 'Georg Brandl', + 'Project version': '0.1', + } + qs.term_input = mock_input(answers) + d = {} + qs.ask_user(d) + qs.generate(d) + + app = application.Sphinx( + tmp_path, # srcdir + tmp_path, # confdir + (tmp_path / '_build' / 'html'), # outdir + (tmp_path / '_build' / '.doctree'), # doctreedir + 'html', # buildername + status=StringIO(), + warning=warnfile) + app.builder.build_all() + warnings = warnfile.getvalue() + assert not warnings + + +def test_default_filename(tmp_path): + answers = { + 'Root path': str(tmp_path), + 'Project name': '\u30c9\u30a4\u30c4', # Fullwidth characters only + 'Author name': 'Georg Brandl', + 'Project version': '0.1', + } + qs.term_input = mock_input(answers) + d = {} + qs.ask_user(d) + qs.generate(d) + + conffile = tmp_path / 'conf.py' + assert conffile.is_file() + ns = {} + exec(conffile.read_text(encoding='utf8'), ns) # NoQA: S102 + + +def test_extensions(tmp_path): + qs.main(['-q', '-p', 'project_name', '-a', 'author', + '--extensions', 'foo,bar,baz', str(tmp_path)]) + + conffile = tmp_path / 'conf.py' + assert conffile.is_file() + ns = {} + exec(conffile.read_text(encoding='utf8'), ns) # NoQA: S102 + assert ns['extensions'] == ['foo', 'bar', 'baz'] + + +def test_exits_when_existing_confpy(monkeypatch): + # The code detects existing conf.py with path.is_file() + # so we mock it as True with pytest's monkeypatch + def mock_isfile(path): + return True + monkeypatch.setattr(path, 'isfile', mock_isfile) + + qs.term_input = mock_input({ + 'Please enter a new root path (or just Enter to exit)': '', + }) + d = {} + with pytest.raises(SystemExit): + qs.ask_user(d) diff --git a/tests/test_roles.py b/tests/test_roles.py new file mode 100644 index 0000000..67a13c8 --- /dev/null +++ b/tests/test_roles.py @@ -0,0 +1,75 @@ +"""Test sphinx.roles""" + +from unittest.mock import Mock + +from docutils import nodes + +from sphinx.roles import EmphasizedLiteral +from sphinx.testing.util import assert_node + + +def test_samp(): + emph_literal_role = EmphasizedLiteral() + + # normal case + text = 'print 1+{variable}' + 
ret, msg = emph_literal_role('samp', text, text, 0, Mock()) + assert_node(ret[0], [nodes.literal, ("print 1+", + [nodes.emphasis, "variable"])]) + assert msg == [] + + # two emphasis items + text = 'print {1}+{variable}' + ret, msg = emph_literal_role('samp', text, text, 0, Mock()) + assert_node(ret[0], [nodes.literal, ("print ", + [nodes.emphasis, "1"], + "+", + [nodes.emphasis, "variable"])]) + assert msg == [] + + # empty curly brace + text = 'print 1+{}' + ret, msg = emph_literal_role('samp', text, text, 0, Mock()) + assert_node(ret[0], [nodes.literal, "print 1+{}"]) + assert msg == [] + + # half-opened variable + text = 'print 1+{variable' + ret, msg = emph_literal_role('samp', text, text, 0, Mock()) + assert_node(ret[0], [nodes.literal, "print 1+{variable"]) + assert msg == [] + + # nested + text = 'print 1+{{variable}}' + ret, msg = emph_literal_role('samp', text, text, 0, Mock()) + assert_node(ret[0], [nodes.literal, ("print 1+", + [nodes.emphasis, "{variable"], + "}")]) + assert msg == [] + + # emphasized item only + text = '{variable}' + ret, msg = emph_literal_role('samp', text, text, 0, Mock()) + assert_node(ret[0], [nodes.literal, nodes.emphasis, "variable"]) + assert msg == [] + + # escaping + text = r'print 1+\{variable}' + ret, msg = emph_literal_role('samp', text, text, 0, Mock()) + assert_node(ret[0], [nodes.literal, "print 1+{variable}"]) + assert msg == [] + + # escaping (2) + text = r'print 1+\{{variable}\}' + ret, msg = emph_literal_role('samp', text, text, 0, Mock()) + assert_node(ret[0], [nodes.literal, ("print 1+{", + [nodes.emphasis, "variable"], + "}")]) + assert msg == [] + + # escape a backslash + text = r'print 1+\\{variable}' + ret, msg = emph_literal_role('samp', text, text, 0, Mock()) + assert_node(ret[0], [nodes.literal, ("print 1+\\", + [nodes.emphasis, "variable"])]) + assert msg == [] diff --git a/tests/test_search.py b/tests/test_search.py new file mode 100644 index 0000000..68a7b01 --- /dev/null +++ b/tests/test_search.py @@ -0,0 +1,306 @@ +"""Test the search index builder.""" + +import json +import warnings +from io import BytesIO + +import pytest +from docutils import frontend, utils +from docutils.parsers import rst + +from sphinx.search import IndexBuilder + + +class DummyEnvironment: + def __init__(self, version, domains): + self.version = version + self.domains = domains + + def __getattr__(self, name): + if name.startswith('_search_index_'): + setattr(self, name, {}) + return getattr(self, name, {}) + + def __str__(self): + return f'DummyEnvironment({self.version!r}, {self.domains!r})' + + +class DummyDomain: + def __init__(self, data): + self.data = data + self.object_types = {} + + def get_objects(self): + return self.data + + +settings = parser = None + + +def setup_module(): + global settings, parser + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', category=DeprecationWarning) + # DeprecationWarning: The frontend.OptionParser class will be replaced + # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later. + optparser = frontend.OptionParser(components=(rst.Parser,)) + settings = optparser.get_default_values() + parser = rst.Parser() + + +def load_searchindex(path): + searchindex = path.read_text(encoding='utf8') + assert searchindex.startswith('Search.setIndex(') + assert searchindex.endswith(')') + + return json.loads(searchindex[16:-1]) + + +def is_registered_term(index, keyword): + return index['terms'].get(keyword, []) != [] + + +FILE_CONTENTS = '''\ +section_title +============= + +.. 
test that comments are not indexed: boson + +test that non-comments are indexed: fermion +''' + + +@pytest.mark.sphinx(testroot='ext-viewcode') +def test_objects_are_escaped(app): + app.builder.build_all() + index = load_searchindex(app.outdir / 'searchindex.js') + for item in index.get('objects').get(''): + if item[-1] == 'n::Array<T, d>': # n::Array<T,d> is escaped + break + else: + raise AssertionError(index.get('objects').get('')) + + +@pytest.mark.sphinx(testroot='search') +def test_meta_keys_are_handled_for_language_en(app): + app.builder.build_all() + searchindex = load_searchindex(app.outdir / 'searchindex.js') + assert not is_registered_term(searchindex, 'thisnoteith') + assert is_registered_term(searchindex, 'thisonetoo') + assert is_registered_term(searchindex, 'findthiskei') + assert is_registered_term(searchindex, 'thistoo') + assert not is_registered_term(searchindex, 'onlygerman') + assert is_registered_term(searchindex, 'notgerman') + assert not is_registered_term(searchindex, 'onlytoogerman') + + +@pytest.mark.sphinx(testroot='search', confoverrides={'html_search_language': 'de'}, freshenv=True) +def test_meta_keys_are_handled_for_language_de(app): + app.builder.build_all() + searchindex = load_searchindex(app.outdir / 'searchindex.js') + assert not is_registered_term(searchindex, 'thisnoteith') + assert is_registered_term(searchindex, 'thisonetoo') + assert not is_registered_term(searchindex, 'findthiskei') + assert not is_registered_term(searchindex, 'thistoo') + assert is_registered_term(searchindex, 'onlygerman') + assert not is_registered_term(searchindex, 'notgerman') + assert is_registered_term(searchindex, 'onlytoogerman') + + +@pytest.mark.sphinx(testroot='search') +def test_stemmer_does_not_remove_short_words(app): + app.builder.build_all() + searchindex = (app.outdir / 'searchindex.js').read_text(encoding='utf8') + assert 'bat' in searchindex + + +@pytest.mark.sphinx(testroot='search') +def test_stemmer(app): + app.builder.build_all() + searchindex = load_searchindex(app.outdir / 'searchindex.js') + print(searchindex) + assert is_registered_term(searchindex, 'findthisstemmedkei') + assert is_registered_term(searchindex, 'intern') + + +@pytest.mark.sphinx(testroot='search') +def test_term_in_heading_and_section(app): + app.builder.build_all() + searchindex = (app.outdir / 'searchindex.js').read_text(encoding='utf8') + # if search term is in the title of one doc and in the text of another + # both documents should be a hit in the search index as a title, + # respectively text hit + assert '"textinhead": 2' in searchindex + assert '"textinhead": 0' in searchindex + + +@pytest.mark.sphinx(testroot='search') +def test_term_in_raw_directive(app): + app.builder.build_all() + searchindex = load_searchindex(app.outdir / 'searchindex.js') + assert not is_registered_term(searchindex, 'raw') + assert is_registered_term(searchindex, 'rawword') + assert not is_registered_term(searchindex, 'latex_keyword') + + +def test_IndexBuilder(): + domain1 = DummyDomain([('objname1', 'objdispname1', 'objtype1', 'docname1_1', '#anchor', 1), + ('objname2', 'objdispname2', 'objtype2', 'docname1_2', '', -1)]) + domain2 = DummyDomain([('objname1', 'objdispname1', 'objtype1', 'docname2_1', '#anchor', 1), + ('objname2', 'objdispname2', 'objtype2', 'docname2_2', '', -1)]) + env = DummyEnvironment('1.0', {'dummy1': domain1, 'dummy2': domain2}) + doc = utils.new_document(b'test data', settings) + doc['file'] = 'dummy' + parser.parse(FILE_CONTENTS, doc) + + # feed + index = IndexBuilder(env, 'en', 
{}, None) + index.feed('docname1_1', 'filename1_1', 'title1_1', doc) + index.feed('docname1_2', 'filename1_2', 'title1_2', doc) + index.feed('docname2_1', 'filename2_1', 'title2_1', doc) + index.feed('docname2_2', 'filename2_2', 'title2_2', doc) + assert index._titles == {'docname1_1': 'title1_1', 'docname1_2': 'title1_2', + 'docname2_1': 'title2_1', 'docname2_2': 'title2_2'} + assert index._filenames == {'docname1_1': 'filename1_1', 'docname1_2': 'filename1_2', + 'docname2_1': 'filename2_1', 'docname2_2': 'filename2_2'} + assert index._mapping == { + 'ar': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}, + 'fermion': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}, + 'comment': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}, + 'non': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}, + 'index': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}, + 'test': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}, + } + assert index._title_mapping == {'section_titl': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}} + assert index._objtypes == {} + assert index._objnames == {} + + # freeze + assert index.freeze() == { + 'docnames': ('docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'), + 'envversion': '1.0', + 'filenames': ['filename1_1', 'filename1_2', 'filename2_1', 'filename2_2'], + 'objects': {'': [(0, 0, 1, '#anchor', 'objdispname1'), + (2, 1, 1, '#anchor', 'objdispname1')]}, + 'objnames': {0: ('dummy1', 'objtype1', 'objtype1'), 1: ('dummy2', 'objtype1', 'objtype1')}, + 'objtypes': {0: 'dummy1:objtype1', 1: 'dummy2:objtype1'}, + 'terms': {'ar': [0, 1, 2, 3], + 'comment': [0, 1, 2, 3], + 'fermion': [0, 1, 2, 3], + 'index': [0, 1, 2, 3], + 'non': [0, 1, 2, 3], + 'test': [0, 1, 2, 3]}, + 'titles': ('title1_1', 'title1_2', 'title2_1', 'title2_2'), + 'titleterms': {'section_titl': [0, 1, 2, 3]}, + 'alltitles': {'section_title': [(0, 'section-title'), (1, 'section-title'), (2, 'section-title'), (3, 'section-title')]}, + 'indexentries': {}, + } + assert index._objtypes == {('dummy1', 'objtype1'): 0, ('dummy2', 'objtype1'): 1} + assert index._objnames == {0: ('dummy1', 'objtype1', 'objtype1'), + 1: ('dummy2', 'objtype1', 'objtype1')} + + env = DummyEnvironment('1.0', {'dummy1': domain1, 'dummy2': domain2}) + + # dump / load + stream = BytesIO() + index.dump(stream, 'pickle') + stream.seek(0) + + index2 = IndexBuilder(env, 'en', {}, None) + index2.load(stream, 'pickle') + + assert index2._titles == index._titles + assert index2._filenames == index._filenames + assert index2._mapping == index._mapping + assert index2._title_mapping == index._title_mapping + assert index2._objtypes == {} + assert index2._objnames == {} + + # freeze after load + assert index2.freeze() == index.freeze() + assert index2._objtypes == index._objtypes + assert index2._objnames == index._objnames + + # prune + index.prune(['docname1_2', 'docname2_2']) + assert index._titles == {'docname1_2': 'title1_2', 'docname2_2': 'title2_2'} + assert index._filenames == {'docname1_2': 'filename1_2', 'docname2_2': 'filename2_2'} + assert index._mapping == { + 'ar': {'docname1_2', 'docname2_2'}, + 'fermion': {'docname1_2', 'docname2_2'}, + 'comment': {'docname1_2', 'docname2_2'}, + 'non': {'docname1_2', 'docname2_2'}, + 'index': {'docname1_2', 'docname2_2'}, + 'test': {'docname1_2', 'docname2_2'}, + } + assert index._title_mapping == {'section_titl': {'docname1_2', 'docname2_2'}} + assert index._objtypes == {('dummy1', 'objtype1'): 0, ('dummy2', 'objtype1'): 1} + assert 
index._objnames == {0: ('dummy1', 'objtype1', 'objtype1'), 1: ('dummy2', 'objtype1', 'objtype1')} + + # freeze after prune + assert index.freeze() == { + 'docnames': ('docname1_2', 'docname2_2'), + 'envversion': '1.0', + 'filenames': ['filename1_2', 'filename2_2'], + 'objects': {}, + 'objnames': {0: ('dummy1', 'objtype1', 'objtype1'), 1: ('dummy2', 'objtype1', 'objtype1')}, + 'objtypes': {0: 'dummy1:objtype1', 1: 'dummy2:objtype1'}, + 'terms': {'ar': [0, 1], + 'comment': [0, 1], + 'fermion': [0, 1], + 'index': [0, 1], + 'non': [0, 1], + 'test': [0, 1]}, + 'titles': ('title1_2', 'title2_2'), + 'titleterms': {'section_titl': [0, 1]}, + 'alltitles': {'section_title': [(0, 'section-title'), (1, 'section-title')]}, + 'indexentries': {}, + } + assert index._objtypes == {('dummy1', 'objtype1'): 0, ('dummy2', 'objtype1'): 1} + assert index._objnames == {0: ('dummy1', 'objtype1', 'objtype1'), + 1: ('dummy2', 'objtype1', 'objtype1')} + + +def test_IndexBuilder_lookup(): + env = DummyEnvironment('1.0', {}) + + # zh + index = IndexBuilder(env, 'zh', {}, None) + assert index.lang.lang == 'zh' + + # zh_CN + index = IndexBuilder(env, 'zh_CN', {}, None) + assert index.lang.lang == 'zh' + + +@pytest.mark.sphinx( + testroot='search', + confoverrides={'html_search_language': 'zh'}, + srcdir='search_zh', +) +def test_search_index_gen_zh(app): + app.builder.build_all() + index = load_searchindex(app.outdir / 'searchindex.js') + assert 'chinesetest ' not in index['terms'] + assert 'chinesetest' in index['terms'] + assert 'chinesetesttwo' in index['terms'] + assert 'cas' in index['terms'] + + +@pytest.mark.sphinx(testroot='search', freshenv=True) +def test_nosearch(app): + app.build() + index = load_searchindex(app.outdir / 'searchindex.js') + assert index['docnames'] == ['index', 'nosearch', 'tocitem'] + assert 'latex' not in index['terms'] + assert 'bat' in index['terms'] + # bat is indexed from 'index.rst' and 'tocitem.rst' (document IDs 0, 2), and + # not from 'nosearch.rst' (document ID 1) + assert index['terms']['bat'] == [0, 2] + + +@pytest.mark.sphinx(testroot='search', parallel=3, freshenv=True) +def test_parallel(app): + app.build() + index = load_searchindex(app.outdir / 'searchindex.js') + assert index['docnames'] == ['index', 'nosearch', 'tocitem'] diff --git a/tests/test_smartquotes.py b/tests/test_smartquotes.py new file mode 100644 index 0000000..1d4e8e1 --- /dev/null +++ b/tests/test_smartquotes.py @@ -0,0 +1,99 @@ +"""Test smart quotes.""" + +import pytest +from html5lib import HTMLParser + + +@pytest.mark.sphinx(buildername='html', testroot='smartquotes', freshenv=True) +def test_basic(app, status, warning): + app.build() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<p>– “Sphinx” is a tool that makes it easy …</p>' in content + + +@pytest.mark.sphinx(buildername='html', testroot='smartquotes', freshenv=True) +def test_literals(app, status, warning): + app.build() + + with (app.outdir / 'literals.html').open(encoding='utf-8') as html_file: + etree = HTMLParser(namespaceHTMLElements=False).parse(html_file) + + for code_element in etree.iter('code'): + code_text = ''.join(code_element.itertext()) + + if code_text.startswith('code role'): + assert "'quotes'" in code_text + elif code_text.startswith('{'): + assert code_text == "{'code': 'role', 'with': 'quotes'}" + elif code_text.startswith('literal'): + assert code_text == "literal with 'quotes'" + + +@pytest.mark.sphinx(buildername='text', testroot='smartquotes', freshenv=True) +def test_text_builder(app, status, 
warning): + app.build() + + content = (app.outdir / 'index.txt').read_text(encoding='utf8') + assert '-- "Sphinx" is a tool that makes it easy ...' in content + + +@pytest.mark.sphinx(buildername='man', testroot='smartquotes', freshenv=True) +def test_man_builder(app, status, warning): + app.build() + + content = (app.outdir / 'python.1').read_text(encoding='utf8') + assert r'\-\- \(dqSphinx\(dq is a tool that makes it easy ...' in content + + +@pytest.mark.sphinx(buildername='latex', testroot='smartquotes', freshenv=True) +def test_latex_builder(app, status, warning): + app.build() + + content = (app.outdir / 'python.tex').read_text(encoding='utf8') + assert '\\textendash{} “Sphinx” is a tool that makes it easy …' in content + + +@pytest.mark.sphinx(buildername='html', testroot='smartquotes', freshenv=True, + confoverrides={'language': 'ja'}) +def test_ja_html_builder(app, status, warning): + app.build() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<p>-- "Sphinx" is a tool that makes it easy ...</p>' in content + + +@pytest.mark.sphinx(buildername='html', testroot='smartquotes', freshenv=True, + confoverrides={'smartquotes': False}) +def test_smartquotes_disabled(app, status, warning): + app.build() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<p>-- "Sphinx" is a tool that makes it easy ...</p>' in content + + +@pytest.mark.sphinx(buildername='html', testroot='smartquotes', freshenv=True, + confoverrides={'smartquotes_action': 'q'}) +def test_smartquotes_action(app, status, warning): + app.build() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<p>-- “Sphinx” is a tool that makes it easy ...</p>' in content + + +@pytest.mark.sphinx(buildername='html', testroot='smartquotes', freshenv=True, + confoverrides={'language': 'ja', 'smartquotes_excludes': {}}) +def test_smartquotes_excludes_language(app, status, warning): + app.build() + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<p>– 「Sphinx」 is a tool that makes it easy …</p>' in content + + +@pytest.mark.sphinx(buildername='man', testroot='smartquotes', freshenv=True, + confoverrides={'smartquotes_excludes': {}}) +def test_smartquotes_excludes_builders(app, status, warning): + app.build() + + content = (app.outdir / 'python.1').read_text(encoding='utf8') + assert '– “Sphinx” is a tool that makes it easy …' in content diff --git a/tests/test_templating.py b/tests/test_templating.py new file mode 100644 index 0000000..a41af93 --- /dev/null +++ b/tests/test_templating.py @@ -0,0 +1,41 @@ +"""Test templating.""" + +import pytest + +from sphinx.ext.autosummary.generate import setup_documenters + + +@pytest.mark.sphinx('html', testroot='templating') +def test_layout_overloading(make_app, app_params): + args, kwargs = app_params + app = make_app(*args, **kwargs) + setup_documenters(app) + app.builder.build_update() + + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<!-- layout overloading -->' in result + + +@pytest.mark.sphinx('html', testroot='templating') +def test_autosummary_class_template_overloading(make_app, app_params): + args, kwargs = app_params + app = make_app(*args, **kwargs) + setup_documenters(app) + app.builder.build_update() + + result = (app.outdir / 'generated' / 'sphinx.application.TemplateBridge.html').read_text(encoding='utf8') + assert 'autosummary/class.rst method block overloading' in result + assert 'foobar' not in result + + +@pytest.mark.sphinx('html', 
testroot='templating', + confoverrides={'autosummary_context': {'sentence': 'foobar'}}) +def test_autosummary_context(make_app, app_params): + args, kwargs = app_params + app = make_app(*args, **kwargs) + setup_documenters(app) + app.builder.build_update() + + result = (app.outdir / 'generated' / 'sphinx.application.TemplateBridge.html').read_text(encoding='utf8') + assert 'autosummary/class.rst method block overloading' in result + assert 'foobar' in result diff --git a/tests/test_theming.py b/tests/test_theming.py new file mode 100644 index 0000000..b4c8511 --- /dev/null +++ b/tests/test_theming.py @@ -0,0 +1,131 @@ +"""Test the Theme class.""" + +import os + +import alabaster +import pytest + +import sphinx.builders.html +from sphinx.theming import ThemeError + + +@pytest.mark.sphinx( + testroot='theming', + confoverrides={'html_theme': 'ziptheme', + 'html_theme_options.testopt': 'foo'}) +def test_theme_api(app, status, warning): + cfg = app.config + + themes = ['basic', 'default', 'scrolls', 'agogo', 'sphinxdoc', 'haiku', + 'traditional', 'epub', 'nature', 'pyramid', 'bizstyle', 'classic', 'nonav', + 'test-theme', 'ziptheme', 'staticfiles', 'parent', 'child'] + try: + alabaster_version = alabaster.__version_info__ + except AttributeError: + alabaster_version = alabaster.version.__version_info__ + if alabaster_version >= (0, 7, 11): + themes.append('alabaster') + + # test Theme class API + assert set(app.registry.html_themes.keys()) == set(themes) + assert app.registry.html_themes['test-theme'] == str(app.srcdir / 'test_theme' / 'test-theme') + assert app.registry.html_themes['ziptheme'] == str(app.srcdir / 'ziptheme.zip') + assert app.registry.html_themes['staticfiles'] == str(app.srcdir / 'test_theme' / 'staticfiles') + + # test Theme instance API + theme = app.builder.theme + assert theme.name == 'ziptheme' + themedir = theme.themedir + assert theme.base.name == 'basic' + assert len(theme.get_theme_dirs()) == 2 + + # direct setting + assert theme.get_config('theme', 'stylesheet') == 'custom.css' + # inherited setting + assert theme.get_config('options', 'nosidebar') == 'false' + # nonexisting setting + assert theme.get_config('theme', 'foobar', 'def') == 'def' + with pytest.raises(ThemeError): + theme.get_config('theme', 'foobar') + + # options API + + options = theme.get_options({'nonexisting': 'foo'}) + assert 'nonexisting' not in options + + options = theme.get_options(cfg.html_theme_options) + assert options['testopt'] == 'foo' + assert options['nosidebar'] == 'false' + + # cleanup temp directories + theme.cleanup() + assert not os.path.exists(themedir) + + +@pytest.mark.sphinx(testroot='double-inheriting-theme') +def test_double_inheriting_theme(app, status, warning): + assert app.builder.theme.name == 'base_theme2' + app.build() # => not raises TemplateNotFound + + +@pytest.mark.sphinx(testroot='theming', + confoverrides={'html_theme': 'child'}) +def test_nested_zipped_theme(app, status, warning): + assert app.builder.theme.name == 'child' + app.build() # => not raises TemplateNotFound + + +@pytest.mark.sphinx(testroot='theming', + confoverrides={'html_theme': 'staticfiles'}) +def test_staticfiles(app, status, warning): + app.build() + assert (app.outdir / '_static' / 'staticimg.png').exists() + assert (app.outdir / '_static' / 'statictmpl.html').exists() + assert (app.outdir / '_static' / 'statictmpl.html').read_text(encoding='utf8') == ( + '<!-- testing static templates -->\n' + '<html><project>Python</project></html>' + ) + + result = (app.outdir / 
'index.html').read_text(encoding='utf8') + assert '<meta name="testopt" content="optdefault" />' in result + + +@pytest.mark.sphinx(testroot='theming', + confoverrides={'html_theme': 'test-theme'}) +def test_dark_style(app, monkeypatch): + monkeypatch.setattr(sphinx.builders.html, '_file_checksum', lambda o, f: '') + + style = app.builder.dark_highlighter.formatter_args.get('style') + assert style.__name__ == 'MonokaiStyle' + + app.build() + assert (app.outdir / '_static' / 'pygments_dark.css').exists() + + css_file, properties = app.registry.css_files[0] + assert css_file == 'pygments_dark.css' + assert "media" in properties + assert properties["media"] == '(prefers-color-scheme: dark)' + + assert sorted(f.filename for f in app.builder._css_files) == [ + '_static/classic.css', + '_static/pygments.css', + '_static/pygments_dark.css', + ] + + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<link rel="stylesheet" type="text/css" href="_static/pygments.css" />' in result + assert ('<link id="pygments_dark_css" media="(prefers-color-scheme: dark)" ' + 'rel="stylesheet" type="text/css" ' + 'href="_static/pygments_dark.css" />') in result + + +@pytest.mark.sphinx(testroot='theming') +def test_theme_sidebars(app, status, warning): + app.build() + + # test-theme specifies globaltoc and searchbox as default sidebars + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<h3><a href="#">Table of Contents</a></h3>' in result + assert '<h3>Related Topics</h3>' not in result + assert '<h3>This Page</h3>' not in result + assert '<h3 id="searchlabel">Quick search</h3>' in result diff --git a/tests/test_toctree.py b/tests/test_toctree.py new file mode 100644 index 0000000..39d0916 --- /dev/null +++ b/tests/test_toctree.py @@ -0,0 +1,39 @@ +"""Test the HTML builder and check output against XPath.""" +import re + +import pytest + + +@pytest.mark.sphinx(testroot='toctree-glob') +def test_relations(app, status, warning): + app.builder.build_all() + assert app.builder.relations['index'] == [None, None, 'foo'] + assert app.builder.relations['foo'] == ['index', 'index', 'bar/index'] + assert app.builder.relations['bar/index'] == ['index', 'foo', 'bar/bar_1'] + assert app.builder.relations['bar/bar_1'] == ['bar/index', 'bar/index', 'bar/bar_2'] + assert app.builder.relations['bar/bar_2'] == ['bar/index', 'bar/bar_1', 'bar/bar_3'] + assert app.builder.relations['bar/bar_3'] == ['bar/index', 'bar/bar_2', 'bar/bar_4/index'] + assert app.builder.relations['bar/bar_4/index'] == ['bar/index', 'bar/bar_3', 'baz'] + assert app.builder.relations['baz'] == ['index', 'bar/bar_4/index', 'qux/index'] + assert app.builder.relations['qux/index'] == ['index', 'baz', 'qux/qux_1'] + assert app.builder.relations['qux/qux_1'] == ['qux/index', 'qux/index', 'qux/qux_2'] + assert app.builder.relations['qux/qux_2'] == ['qux/index', 'qux/qux_1', None] + assert 'quux' not in app.builder.relations + + +@pytest.mark.sphinx('singlehtml', testroot='toctree-empty') +def test_singlehtml_toctree(app, status, warning): + app.builder.build_all() + try: + app.builder._get_local_toctree('index') + except AttributeError: + pytest.fail('Unexpected AttributeError in app.builder.fix_refuris') + + +@pytest.mark.sphinx(testroot='toctree', srcdir="numbered-toctree") +def test_numbered_toctree(app, status, warning): + # give argument to :numbered: option + index = (app.srcdir / 'index.rst').read_text(encoding='utf8') + index = re.sub(':numbered:.*', ':numbered: 1', index) + (app.srcdir / 
'index.rst').write_text(index, encoding='utf8') + app.builder.build_all() diff --git a/tests/test_transforms_move_module_targets.py b/tests/test_transforms_move_module_targets.py new file mode 100644 index 0000000..e0e9f1d --- /dev/null +++ b/tests/test_transforms_move_module_targets.py @@ -0,0 +1,77 @@ +import pytest +from docutils import nodes + +from sphinx import addnodes +from sphinx.testing.util import SphinxTestApp +from sphinx.transforms import MoveModuleTargets + +CONTENT_PY = """\ +move-module-targets +=================== + +.. py:module:: fish_licence.halibut +""" +CONTENT_JS = """\ +move-module-targets +=================== + +.. js:module:: fish_licence.halibut +""" + + +@pytest.mark.parametrize('content', [ + CONTENT_PY, # Python + CONTENT_JS, # JavaScript +]) +@pytest.mark.usefixtures("rollback_sysmodules") +def test_move_module_targets(tmp_path, content): + # Test for the MoveModuleTargets transform + tmp_path.joinpath("conf.py").touch() + tmp_path.joinpath("index.rst").write_text(content, encoding="utf-8") + + app = SphinxTestApp('dummy', srcdir=tmp_path) + app.build(force_all=True) + document = app.env.get_doctree('index') + section = document[0] + + # target ID has been lifted into the section node + assert section["ids"] == ['module-fish_licence.halibut', 'move-module-targets'] + # nodes.target has been removed from 'section' + assert isinstance(section[0], nodes.title) + assert isinstance(section[1], addnodes.index) + assert len(section) == 2 + + +@pytest.mark.usefixtures("rollback_sysmodules") +def test_move_module_targets_no_section(tmp_path): + # Test for the MoveModuleTargets transform + tmp_path.joinpath("conf.py").touch() + tmp_path.joinpath("index.rst").write_text(".. py:module:: fish_licence.halibut\n", encoding="utf-8") + + app = SphinxTestApp('dummy', srcdir=tmp_path) + app.build(force_all=True) + document = app.env.get_doctree('index') + + assert document["ids"] == [] + + +@pytest.mark.usefixtures("rollback_sysmodules") +def test_move_module_targets_disabled(tmp_path): + # Test for the MoveModuleTargets transform + tmp_path.joinpath("conf.py").touch() + tmp_path.joinpath("index.rst").write_text(CONTENT_PY, encoding="utf-8") + + app = SphinxTestApp('dummy', srcdir=tmp_path) + app.registry.transforms.remove(MoveModuleTargets) # disable the transform + app.build(force_all=True) + document = app.env.get_doctree('index') + section = document[0] + + # target ID is not lifted into the section node + assert section["ids"] == ['move-module-targets'] + assert section[2]["ids"] == ['module-fish_licence.halibut'] + # nodes.target remains in 'section' + assert isinstance(section[0], nodes.title) + assert isinstance(section[1], addnodes.index) + assert isinstance(section[2], nodes.target) + assert len(section) == 3 diff --git a/tests/test_transforms_post_transforms.py b/tests/test_transforms_post_transforms.py new file mode 100644 index 0000000..b9b6126 --- /dev/null +++ b/tests/test_transforms_post_transforms.py @@ -0,0 +1,268 @@ +"""Tests the post_transforms""" + +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING + +import pytest +from docutils import nodes + +from sphinx import addnodes +from sphinx.addnodes import SIG_ELEMENTS +from sphinx.testing.util import assert_node +from sphinx.transforms.post_transforms import SigElementFallbackTransform +from sphinx.util.docutils import new_document + +if TYPE_CHECKING: + from typing import Any, NoReturn + + from _pytest.fixtures import SubRequest + + from sphinx.testing.util import 
SphinxTestApp + + +@pytest.mark.sphinx('html', testroot='transforms-post_transforms-missing-reference') +def test_nitpicky_warning(app, warning): + app.build() + assert ('index.rst:4: WARNING: py:class reference target ' + 'not found: io.StringIO' in warning.getvalue()) + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert ('<p><code class="xref py py-class docutils literal notranslate"><span class="pre">' + 'io.StringIO</span></code></p>' in content) + + +@pytest.mark.sphinx('html', testroot='transforms-post_transforms-missing-reference', + freshenv=True) +def test_missing_reference(app, warning): + def missing_reference(app_, env_, node_, contnode_): + assert app_ is app + assert env_ is app.env + assert node_['reftarget'] == 'io.StringIO' + assert contnode_.astext() == 'io.StringIO' + + return nodes.inline('', 'missing-reference.StringIO') + + warning.truncate(0) + app.connect('missing-reference', missing_reference) + app.build() + assert warning.getvalue() == '' + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<p><span>missing-reference.StringIO</span></p>' in content + + +@pytest.mark.sphinx('html', testroot='domain-py-python_use_unqualified_type_names', + freshenv=True) +def test_missing_reference_conditional_pending_xref(app, warning): + def missing_reference(_app, _env, _node, contnode): + return contnode + + warning.truncate(0) + app.connect('missing-reference', missing_reference) + app.build() + assert warning.getvalue() == '' + + content = (app.outdir / 'index.html').read_text(encoding='utf8') + assert '<span class="n"><span class="pre">Age</span></span>' in content + + +@pytest.mark.sphinx('html', testroot='transforms-post_transforms-keyboard', + freshenv=True) +def test_keyboard_hyphen_spaces(app): + """Regression test for issue 10495, we want no crash.""" + app.build() + assert "spanish" in (app.outdir / 'index.html').read_text(encoding='utf8') + assert "inquisition" in (app.outdir / 'index.html').read_text(encoding='utf8') + + +class TestSigElementFallbackTransform: + """Integration test for :class:`sphinx.transforms.post_transforms.SigElementFallbackTransform`.""" + # safe copy of the "built-in" desc_sig_* nodes (during the test, instances of such nodes + # will be created sequentially, so we fix a possible order at the beginning using a tuple) + _builtin_sig_elements: tuple[type[addnodes.desc_sig_element], ...] = tuple(SIG_ELEMENTS) + + @pytest.fixture(autouse=True) + def builtin_sig_elements(self) -> tuple[type[addnodes.desc_sig_element], ...]: + """Fixture returning an ordered view on the original value of :data:`!sphinx.addnodes.SIG_ELEMENTS`.""" + return self._builtin_sig_elements + + @pytest.fixture() + def document( + self, app: SphinxTestApp, builtin_sig_elements: tuple[type[addnodes.desc_sig_element], ...], + ) -> nodes.document: + """Fixture returning a new document with built-in ``desc_sig_*`` nodes and a final ``desc_inline`` node.""" + doc = new_document('') + doc.settings.env = app.env + # Nodes that should be supported by a default custom translator class. + # It is important that builtin_sig_elements has a fixed order so that + # the nodes can be deterministically checked. 
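+            # One instance of every built-in desc_sig_* class is appended, followed by a
+            # single desc_inline node, so document.children[:-1] lines up one-to-one with
+            # builtin_sig_elements in the assertions below.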
+ doc += [node_type('', '') for node_type in builtin_sig_elements] + doc += addnodes.desc_inline('py') + return doc + + @pytest.fixture() + def with_desc_sig_elements(self, value: Any) -> bool: + """Dynamic fixture acting as the identity on booleans.""" + assert isinstance(value, bool) + return value + + @pytest.fixture() + def add_visitor_method_for(self, value: Any) -> list[str]: + """Dynamic fixture acting as the identity on a list of strings.""" + assert isinstance(value, list) + assert all(isinstance(item, str) for item in value) + return value + + @pytest.fixture(autouse=True) + def translator_class(self, request: SubRequest) -> type[nodes.NodeVisitor]: + """Minimal interface fixture similar to SphinxTranslator but orthogonal thereof.""" + logger = logging.getLogger(__name__) + + class BaseCustomTranslatorClass(nodes.NodeVisitor): + """Base class for a custom translator class, orthogonal to ``SphinxTranslator``.""" + + def __init__(self, document, *_a): + super().__init__(document) + # ignore other arguments + + def dispatch_visit(self, node): + for node_class in node.__class__.__mro__: + if method := getattr(self, f'visit_{node_class.__name__}', None): + method(node) + break + else: + logger.info('generic visit: %r', node.__class__.__name__) + super().dispatch_visit(node) + + def unknown_visit(self, node): + logger.warning('unknown visit: %r', node.__class__.__name__) + raise nodes.SkipDeparture # ignore unknown departure + + def visit_document(self, node): + raise nodes.SkipDeparture # ignore departure + + def mark_node(self, node: nodes.Node) -> NoReturn: + logger.info('mark: %r', node.__class__.__name__) + raise nodes.SkipDeparture # ignore departure + + with_desc_sig_elements = request.getfixturevalue('with_desc_sig_elements') + if with_desc_sig_elements: + desc_sig_elements_list = request.getfixturevalue('builtin_sig_elements') + else: + desc_sig_elements_list = [] + add_visitor_method_for = request.getfixturevalue('add_visitor_method_for') + visitor_methods = {f'visit_{tp.__name__}' for tp in desc_sig_elements_list} + visitor_methods.update(f'visit_{name}' for name in add_visitor_method_for) + class_dict = dict.fromkeys(visitor_methods, BaseCustomTranslatorClass.mark_node) + return type('CustomTranslatorClass', (BaseCustomTranslatorClass,), class_dict) # type: ignore[return-value] + + @pytest.mark.parametrize( + 'add_visitor_method_for', + [[], ['desc_inline']], + ids=[ + 'no_explicit_visitor', + 'explicit_desc_inline_visitor', + ], + ) + @pytest.mark.parametrize( + 'with_desc_sig_elements', + [True, False], + ids=[ + 'with_default_visitors_for_desc_sig_elements', + 'without_default_visitors_for_desc_sig_elements', + ], + ) + @pytest.mark.sphinx('dummy') + def test_support_desc_inline( + self, document: nodes.document, with_desc_sig_elements: bool, + add_visitor_method_for: list[str], request: SubRequest, + ) -> None: + document, _, _ = self._exec(request) + # count the number of desc_inline nodes with the extra _sig_node_type field + desc_inline_typename = addnodes.desc_inline.__name__ + visit_desc_inline = desc_inline_typename in add_visitor_method_for + if visit_desc_inline: + assert_node(document[-1], addnodes.desc_inline) + else: + assert_node(document[-1], nodes.inline, _sig_node_type=desc_inline_typename) + + @pytest.mark.parametrize( + 'add_visitor_method_for', + [ + [], # no support + ['desc_sig_space'], # enable desc_sig_space visitor + ['desc_sig_element'], # enable generic visitor + ['desc_sig_space', 'desc_sig_element'], # enable desc_sig_space and generic 
visitors + ], + ids=[ + 'no_explicit_visitor', + 'explicit_desc_sig_space_visitor', + 'explicit_desc_sig_element_visitor', + 'explicit_desc_sig_space_and_desc_sig_element_visitors', + ], + ) + @pytest.mark.parametrize( + 'with_desc_sig_elements', + [True, False], + ids=[ + 'with_default_visitors_for_desc_sig_elements', + 'without_default_visitors_for_desc_sig_elements', + ], + ) + @pytest.mark.sphinx('dummy') + def test_custom_implementation( + self, + document: nodes.document, + with_desc_sig_elements: bool, + add_visitor_method_for: list[str], + request: SubRequest, + ) -> None: + document, stdout, stderr = self._exec(request) + assert len(self._builtin_sig_elements) == len(document.children[:-1]) == len(stdout[:-1]) + + visit_desc_sig_element = addnodes.desc_sig_element.__name__ in add_visitor_method_for + ignore_sig_element_fallback_transform = visit_desc_sig_element or with_desc_sig_elements + + if ignore_sig_element_fallback_transform: + # desc_sig_element is implemented or desc_sig_* nodes are properly handled (and left untouched) + for node_type, node, mess in zip(self._builtin_sig_elements, document.children[:-1], stdout[:-1]): + assert_node(node, node_type) + assert not node.hasattr('_sig_node_type') + assert mess == f'mark: {node_type.__name__!r}' + else: + # desc_sig_* nodes are converted into inline nodes + for node_type, node, mess in zip(self._builtin_sig_elements, document.children[:-1], stdout[:-1]): + assert_node(node, nodes.inline, _sig_node_type=node_type.__name__) + assert mess == f'generic visit: {nodes.inline.__name__!r}' + + # desc_inline node is never handled and always transformed + assert addnodes.desc_inline.__name__ not in add_visitor_method_for + assert_node(document[-1], nodes.inline, _sig_node_type=addnodes.desc_inline.__name__) + assert stdout[-1] == f'generic visit: {nodes.inline.__name__!r}' + + # nodes.inline are never handled + assert len(stderr) == 1 if ignore_sig_element_fallback_transform else len(document.children) + assert set(stderr) == {f'unknown visit: {nodes.inline.__name__!r}'} + + @staticmethod + def _exec(request: SubRequest) -> tuple[nodes.document, list[str], list[str]]: + caplog = request.getfixturevalue('caplog') + caplog.set_level(logging.INFO, logger=__name__) + + app = request.getfixturevalue('app') + translator_class = request.getfixturevalue('translator_class') + app.set_translator('dummy', translator_class) + # run the post-transform directly [building phase] + # document contains SIG_ELEMENTS nodes followed by a desc_inline node + document = request.getfixturevalue('document') + SigElementFallbackTransform(document).run() + # run the translator [writing phase] + translator = translator_class(document, app.builder) + document.walkabout(translator) + # extract messages + messages = caplog.record_tuples + stdout = [message for _, lvl, message in messages if lvl == logging.INFO] + stderr = [message for _, lvl, message in messages if lvl == logging.WARN] + return document, stdout, stderr diff --git a/tests/test_transforms_post_transforms_code.py b/tests/test_transforms_post_transforms_code.py new file mode 100644 index 0000000..4423d5b --- /dev/null +++ b/tests/test_transforms_post_transforms_code.py @@ -0,0 +1,44 @@ +import pytest + + +@pytest.mark.sphinx('html', testroot='trim_doctest_flags') +def test_trim_doctest_flags_html(app, status, warning): + app.build() + + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert 'FOO' not in result + assert 'BAR' in result + assert 'BAZ' not in result + assert 'QUX' not in 
result + assert 'QUUX' not in result + assert 'CORGE' not in result + assert 'GRAULT' in result + + +@pytest.mark.sphinx('html', testroot='trim_doctest_flags', + confoverrides={'trim_doctest_flags': False}) +def test_trim_doctest_flags_disabled(app, status, warning): + app.build() + + result = (app.outdir / 'index.html').read_text(encoding='utf8') + assert 'FOO' in result + assert 'BAR' in result + assert 'BAZ' in result + assert 'QUX' in result + assert 'QUUX' not in result + assert 'CORGE' not in result + assert 'GRAULT' in result + + +@pytest.mark.sphinx('latex', testroot='trim_doctest_flags') +def test_trim_doctest_flags_latex(app, status, warning): + app.build() + + result = (app.outdir / 'python.tex').read_text(encoding='utf8') + assert 'FOO' not in result + assert 'BAR' in result + assert 'BAZ' not in result + assert 'QUX' not in result + assert 'QUUX' not in result + assert 'CORGE' not in result + assert 'GRAULT' in result diff --git a/tests/test_transforms_reorder_nodes.py b/tests/test_transforms_reorder_nodes.py new file mode 100644 index 0000000..7ffdae6 --- /dev/null +++ b/tests/test_transforms_reorder_nodes.py @@ -0,0 +1,96 @@ +"""Tests the transformations""" + +from docutils import nodes + +from sphinx import addnodes +from sphinx.testing import restructuredtext +from sphinx.testing.util import assert_node + + +def test_transforms_reorder_consecutive_target_and_index_nodes_preserve_order(app): + text = ('.. index:: abc\n' + '.. index:: def\n' + '.. index:: ghi\n' + '.. index:: jkl\n' + '\n' + 'text\n') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + addnodes.index, + addnodes.index, + addnodes.index, + nodes.target, + nodes.target, + nodes.target, + nodes.target, + nodes.paragraph)) + assert_node(doctree[0], addnodes.index, entries=[('single', 'abc', 'index-0', '', None)]) + assert_node(doctree[1], addnodes.index, entries=[('single', 'def', 'index-1', '', None)]) + assert_node(doctree[2], addnodes.index, entries=[('single', 'ghi', 'index-2', '', None)]) + assert_node(doctree[3], addnodes.index, entries=[('single', 'jkl', 'index-3', '', None)]) + assert_node(doctree[4], nodes.target, refid='index-0') + assert_node(doctree[5], nodes.target, refid='index-1') + assert_node(doctree[6], nodes.target, refid='index-2') + assert_node(doctree[7], nodes.target, refid='index-3') + # assert_node(doctree[8], nodes.paragraph) + + +def test_transforms_reorder_consecutive_target_and_index_nodes_no_merge_across_other_nodes(app): + text = ('.. index:: abc\n' + '.. index:: def\n' + '\n' + 'text\n' + '\n' + '.. index:: ghi\n' + '.. 
index:: jkl\n' + '\n' + 'text\n') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (addnodes.index, + addnodes.index, + nodes.target, + nodes.target, + nodes.paragraph, + addnodes.index, + addnodes.index, + nodes.target, + nodes.target, + nodes.paragraph)) + assert_node(doctree[0], addnodes.index, entries=[('single', 'abc', 'index-0', '', None)]) + assert_node(doctree[1], addnodes.index, entries=[('single', 'def', 'index-1', '', None)]) + assert_node(doctree[2], nodes.target, refid='index-0') + assert_node(doctree[3], nodes.target, refid='index-1') + # assert_node(doctree[4], nodes.paragraph) + assert_node(doctree[5], addnodes.index, entries=[('single', 'ghi', 'index-2', '', None)]) + assert_node(doctree[6], addnodes.index, entries=[('single', 'jkl', 'index-3', '', None)]) + assert_node(doctree[7], nodes.target, refid='index-2') + assert_node(doctree[8], nodes.target, refid='index-3') + # assert_node(doctree[9], nodes.paragraph) + + +def test_transforms_reorder_consecutive_target_and_index_nodes_merge_with_labels(app): + text = ('.. _abc:\n' + '.. index:: def\n' + '.. _ghi:\n' + '.. index:: jkl\n' + '.. _mno:\n' + '\n' + 'Heading\n' + '=======\n') + doctree = restructuredtext.parse(app, text) + assert_node(doctree, (nodes.title, + addnodes.index, + addnodes.index, + nodes.target, + nodes.target, + nodes.target, + nodes.target, + nodes.target)) + # assert_node(doctree[8], nodes.title) + assert_node(doctree[1], addnodes.index, entries=[('single', 'def', 'index-0', '', None)]) + assert_node(doctree[2], addnodes.index, entries=[('single', 'jkl', 'index-1', '', None)]) + assert_node(doctree[3], nodes.target, refid='abc') + assert_node(doctree[4], nodes.target, refid='index-0') + assert_node(doctree[5], nodes.target, refid='ghi') + assert_node(doctree[6], nodes.target, refid='index-1') + assert_node(doctree[7], nodes.target, refid='mno') diff --git a/tests/test_util.py b/tests/test_util.py new file mode 100644 index 0000000..4389894 --- /dev/null +++ b/tests/test_util.py @@ -0,0 +1,73 @@ +"""Tests util functions.""" + +import os +import tempfile + +import pytest + +from sphinx.errors import ExtensionError +from sphinx.util import encode_uri, ensuredir, import_object, parselinenos + + +def test_encode_uri(): + expected = ('https://ru.wikipedia.org/wiki/%D0%A1%D0%B8%D1%81%D1%82%D0%B5%D0%BC%D0%B0_' + '%D1%83%D0%BF%D1%80%D0%B0%D0%B2%D0%BB%D0%B5%D0%BD%D0%B8%D1%8F_' + '%D0%B1%D0%B0%D0%B7%D0%B0%D0%BC%D0%B8_%D0%B4%D0%B0%D0%BD%D0%BD%D1%8B%D1%85') + uri = ('https://ru.wikipedia.org/wiki' + '/Система_управления_базами_данных') + assert expected == encode_uri(uri) + + expected = ('https://github.com/search?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+is%3A' + 'sprint-friendly+user%3Ajupyter&type=Issues&ref=searchresults') + uri = ('https://github.com/search?utf8=✓&q=is%3Aissue+is%3Aopen+is%3A' + 'sprint-friendly+user%3Ajupyter&type=Issues&ref=searchresults') + assert expected == encode_uri(uri) + + +def test_ensuredir(): + with tempfile.TemporaryDirectory() as tmp_path: + # Does not raise an exception for an existing directory. 
+ ensuredir(tmp_path) + + path = os.path.join(tmp_path, 'a', 'b', 'c') + ensuredir(path) + assert os.path.isdir(path) + + +def test_import_object(): + module = import_object('sphinx') + assert module.__name__ == 'sphinx' + + module = import_object('sphinx.application') + assert module.__name__ == 'sphinx.application' + + obj = import_object('sphinx.application.Sphinx') + assert obj.__name__ == 'Sphinx' + + with pytest.raises(ExtensionError) as exc: + import_object('sphinx.unknown_module') + assert exc.value.args[0] == 'Could not import sphinx.unknown_module' + + with pytest.raises(ExtensionError) as exc: + import_object('sphinx.unknown_module', 'my extension') + assert exc.value.args[0] == ('Could not import sphinx.unknown_module ' + '(needed for my extension)') + + +def test_parselinenos(): + assert parselinenos('1,2,3', 10) == [0, 1, 2] + assert parselinenos('4, 5, 6', 10) == [3, 4, 5] + assert parselinenos('-4', 10) == [0, 1, 2, 3] + assert parselinenos('7-9', 10) == [6, 7, 8] + assert parselinenos('7-', 10) == [6, 7, 8, 9] + assert parselinenos('1,7-', 10) == [0, 6, 7, 8, 9] + assert parselinenos('7-7', 10) == [6] + assert parselinenos('11-', 10) == [10] + with pytest.raises(ValueError, match="invalid line number spec: '1-2-3'"): + parselinenos('1-2-3', 10) + with pytest.raises(ValueError, match="invalid line number spec: 'abc-def'"): + parselinenos('abc-def', 10) + with pytest.raises(ValueError, match="invalid line number spec: '-'"): + parselinenos('-', 10) + with pytest.raises(ValueError, match="invalid line number spec: '3-1'"): + parselinenos('3-1', 10) diff --git a/tests/test_util_display.py b/tests/test_util_display.py new file mode 100644 index 0000000..9ecdd6a --- /dev/null +++ b/tests/test_util_display.py @@ -0,0 +1,103 @@ +"""Tests util functions.""" + +import pytest + +from sphinx.testing.util import strip_escseq +from sphinx.util import logging +from sphinx.util.display import ( + SkipProgressMessage, + display_chunk, + progress_message, + status_iterator, +) + + +def test_display_chunk(): + assert display_chunk('hello') == 'hello' + assert display_chunk(['hello']) == 'hello' + assert display_chunk(['hello', 'sphinx', 'world']) == 'hello .. world' + assert display_chunk(('hello',)) == 'hello' + assert display_chunk(('hello', 'sphinx', 'world')) == 'hello .. world' + + +@pytest.mark.sphinx('dummy') +def test_status_iterator_length_0(app, status, warning): + logging.setup(app, status, warning) + + # test for status_iterator (length=0) + status.seek(0) + status.truncate(0) + yields = list(status_iterator(['hello', 'sphinx', 'world'], 'testing ... ')) + output = strip_escseq(status.getvalue()) + assert 'testing ... hello sphinx world \n' in output + assert yields == ['hello', 'sphinx', 'world'] + + +@pytest.mark.sphinx('dummy') +def test_status_iterator_verbosity_0(app, status, warning): + logging.setup(app, status, warning) + + # test for status_iterator (verbosity=0) + status.seek(0) + status.truncate(0) + yields = list(status_iterator(['hello', 'sphinx', 'world'], 'testing ... ', + length=3, verbosity=0)) + output = strip_escseq(status.getvalue()) + assert 'testing ... [ 33%] hello\r' in output + assert 'testing ... [ 67%] sphinx\r' in output + assert 'testing ... 
[100%] world\r\n' in output + assert yields == ['hello', 'sphinx', 'world'] + + +@pytest.mark.sphinx('dummy') +def test_status_iterator_verbosity_1(app, status, warning): + logging.setup(app, status, warning) + + # test for status_iterator (verbosity=1) + status.seek(0) + status.truncate(0) + yields = list(status_iterator(['hello', 'sphinx', 'world'], 'testing ... ', + length=3, verbosity=1)) + output = strip_escseq(status.getvalue()) + assert 'testing ... [ 33%] hello\n' in output + assert 'testing ... [ 67%] sphinx\n' in output + assert 'testing ... [100%] world\n\n' in output + assert yields == ['hello', 'sphinx', 'world'] + + +def test_progress_message(app, status, warning): + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + # standard case + with progress_message('testing'): + logger.info('blah ', nonl=True) + + output = strip_escseq(status.getvalue()) + assert 'testing... blah done\n' in output + + # skipping case + with progress_message('testing'): + raise SkipProgressMessage('Reason: %s', 'error') # NoQA: EM101 + + output = strip_escseq(status.getvalue()) + assert 'testing... skipped\nReason: error\n' in output + + # error case + try: + with progress_message('testing'): + raise + except Exception: + pass + + output = strip_escseq(status.getvalue()) + assert 'testing... failed\n' in output + + # decorator + @progress_message('testing') + def func(): + logger.info('in func ', nonl=True) + + func() + output = strip_escseq(status.getvalue()) + assert 'testing... in func done\n' in output diff --git a/tests/test_util_docstrings.py b/tests/test_util_docstrings.py new file mode 100644 index 0000000..813e84e --- /dev/null +++ b/tests/test_util_docstrings.py @@ -0,0 +1,88 @@ +"""Test sphinx.util.docstrings.""" + +from sphinx.util.docstrings import prepare_commentdoc, prepare_docstring, separate_metadata + + +def test_separate_metadata(): + # metadata only + text = (":meta foo: bar\n" + ":meta baz:\n") + docstring, metadata = separate_metadata(text) + assert docstring == '' + assert metadata == {'foo': 'bar', 'baz': ''} + + # non metadata field list item + text = (":meta foo: bar\n" + ":param baz:\n") + docstring, metadata = separate_metadata(text) + assert docstring == ':param baz:\n' + assert metadata == {'foo': 'bar'} + + # field_list like text following just after paragraph is not a field_list + text = ("blah blah blah\n" + ":meta foo: bar\n" + ":meta baz:\n") + docstring, metadata = separate_metadata(text) + assert docstring == text + assert metadata == {} + + # field_list like text following after blank line is a field_list + text = ("blah blah blah\n" + "\n" + ":meta foo: bar\n" + ":meta baz:\n") + docstring, metadata = separate_metadata(text) + assert docstring == "blah blah blah\n\n" + assert metadata == {'foo': 'bar', 'baz': ''} + + # non field_list item breaks field_list + text = (":meta foo: bar\n" + "blah blah blah\n" + ":meta baz:\n") + docstring, metadata = separate_metadata(text) + assert docstring == ("blah blah blah\n" + ":meta baz:\n") + assert metadata == {'foo': 'bar'} + + +def test_prepare_docstring(): + docstring = """multiline docstring + + Lorem ipsum dolor sit amet, consectetur adipiscing elit, + sed do eiusmod tempor incididunt ut labore et dolore magna + aliqua:: + + Ut enim ad minim veniam, quis nostrud exercitation + ullamco laboris nisi ut aliquip ex ea commodo consequat. 
+ """ + + assert (prepare_docstring(docstring) == + ["multiline docstring", + "", + "Lorem ipsum dolor sit amet, consectetur adipiscing elit,", + "sed do eiusmod tempor incididunt ut labore et dolore magna", + "aliqua::", + "", + " Ut enim ad minim veniam, quis nostrud exercitation", + " ullamco laboris nisi ut aliquip ex ea commodo consequat.", + ""]) + + docstring = """ + + multiline docstring with leading empty lines + """ + assert (prepare_docstring(docstring) == + ["multiline docstring with leading empty lines", + ""]) + + docstring = "single line docstring" + assert (prepare_docstring(docstring) == + ["single line docstring", + ""]) + + +def test_prepare_commentdoc(): + assert prepare_commentdoc("hello world") == [] + assert prepare_commentdoc("#: hello world") == ["hello world", ""] + assert prepare_commentdoc("#: hello world") == [" hello world", ""] + assert prepare_commentdoc("#: hello\n#: world\n") == ["hello", "world", ""] diff --git a/tests/test_util_docutils.py b/tests/test_util_docutils.py new file mode 100644 index 0000000..69999eb --- /dev/null +++ b/tests/test_util_docutils.py @@ -0,0 +1,92 @@ +"""Tests util.utils functions.""" + +import os + +from docutils import nodes + +from sphinx.util.docutils import ( + SphinxFileOutput, + SphinxTranslator, + docutils_namespace, + new_document, + register_node, +) + + +def test_register_node(): + class custom_node(nodes.Element): + pass + + with docutils_namespace(): + register_node(custom_node) + + # check registered + assert hasattr(nodes.GenericNodeVisitor, 'visit_custom_node') + assert hasattr(nodes.GenericNodeVisitor, 'depart_custom_node') + assert hasattr(nodes.SparseNodeVisitor, 'visit_custom_node') + assert hasattr(nodes.SparseNodeVisitor, 'depart_custom_node') + + # check unregistered outside namespace + assert not hasattr(nodes.GenericNodeVisitor, 'visit_custom_node') + assert not hasattr(nodes.GenericNodeVisitor, 'depart_custom_node') + assert not hasattr(nodes.SparseNodeVisitor, 'visit_custom_node') + assert not hasattr(nodes.SparseNodeVisitor, 'depart_custom_node') + + +def test_SphinxFileOutput(tmpdir): + content = 'Hello Sphinx World' + + # write test.txt at first + filename = str(tmpdir / 'test.txt') + output = SphinxFileOutput(destination_path=filename) + output.write(content) + os.utime(filename, (0, 0)) + + # overwrite it again + output.write(content) + assert os.stat(filename).st_mtime != 0 # updated + + # write test2.txt at first + filename = str(tmpdir / 'test2.txt') + output = SphinxFileOutput(destination_path=filename, overwrite_if_changed=True) + output.write(content) + os.utime(filename, (0, 0)) + + # overwrite it again + output.write(content) + assert os.stat(filename).st_mtime == 0 # not updated + + # overwrite it again (content changed) + output.write(content + "; content change") + assert os.stat(filename).st_mtime != 0 # updated + + +def test_SphinxTranslator(app): + class CustomNode(nodes.inline): + pass + + class MyTranslator(SphinxTranslator): + def __init__(self, *args): + self.called = [] + super().__init__(*args) + + def visit_document(self, node): + pass + + def depart_document(self, node): + pass + + def visit_inline(self, node): + self.called.append('visit_inline') + + def depart_inline(self, node): + self.called.append('depart_inline') + + document = new_document('') + document += CustomNode() + + translator = MyTranslator(document, app.builder) + document.walkabout(translator) + + # MyTranslator does not have visit_CustomNode. But it calls visit_inline instead. 
+ assert translator.called == ['visit_inline', 'depart_inline'] diff --git a/tests/test_util_fileutil.py b/tests/test_util_fileutil.py new file mode 100644 index 0000000..9c23821 --- /dev/null +++ b/tests/test_util_fileutil.py @@ -0,0 +1,103 @@ +"""Tests sphinx.util.fileutil functions.""" + +from unittest import mock + +from sphinx.jinja2glue import BuiltinTemplateLoader +from sphinx.util.fileutil import copy_asset, copy_asset_file + + +class DummyTemplateLoader(BuiltinTemplateLoader): + def __init__(self): + super().__init__() + builder = mock.Mock() + builder.config.templates_path = [] + builder.app.translator = None + self.init(builder) + + +def test_copy_asset_file(tmp_path): + renderer = DummyTemplateLoader() + + # copy normal file + src = (tmp_path / 'asset.txt') + src.write_text('# test data', encoding='utf8') + dest = (tmp_path / 'output.txt') + + copy_asset_file(src, dest) + assert dest.exists() + assert src.read_text(encoding='utf8') == dest.read_text(encoding='utf8') + + # copy template file + src = (tmp_path / 'asset.txt_t') + src.write_text('# {{var1}} data', encoding='utf8') + dest = (tmp_path / 'output.txt_t') + + copy_asset_file(str(src), str(dest), {'var1': 'template'}, renderer) + assert not dest.exists() + assert (tmp_path / 'output.txt').exists() + assert (tmp_path / 'output.txt').read_text(encoding='utf8') == '# template data' + + # copy template file to subdir + src = (tmp_path / 'asset.txt_t') + src.write_text('# {{var1}} data', encoding='utf8') + subdir1 = (tmp_path / 'subdir') + subdir1.mkdir(parents=True, exist_ok=True) + + copy_asset_file(src, subdir1, {'var1': 'template'}, renderer) + assert (subdir1 / 'asset.txt').exists() + assert (subdir1 / 'asset.txt').read_text(encoding='utf8') == '# template data' + + # copy template file without context + src = (tmp_path / 'asset.txt_t') + subdir2 = (tmp_path / 'subdir2') + subdir2.mkdir(parents=True, exist_ok=True) + + copy_asset_file(src, subdir2) + assert not (subdir2 / 'asset.txt').exists() + assert (subdir2 / 'asset.txt_t').exists() + assert (subdir2 / 'asset.txt_t').read_text(encoding='utf8') == '# {{var1}} data' + + +def test_copy_asset(tmp_path): + renderer = DummyTemplateLoader() + + # prepare source files + source = (tmp_path / 'source') + source.mkdir(parents=True, exist_ok=True) + (source / 'index.rst').write_text('index.rst', encoding='utf8') + (source / 'foo.rst_t').write_text('{{var1}}.rst', encoding='utf8') + (source / '_static').mkdir(parents=True, exist_ok=True) + (source / '_static' / 'basic.css').write_text('basic.css', encoding='utf8') + (source / '_templates').mkdir(parents=True, exist_ok=True) + (source / '_templates' / 'layout.html').write_text('layout.html', encoding='utf8') + (source / '_templates' / 'sidebar.html_t').write_text('sidebar: {{var2}}', encoding='utf8') + + # copy a single file + assert not (tmp_path / 'test1').exists() + copy_asset(source / 'index.rst', tmp_path / 'test1') + assert (tmp_path / 'test1').exists() + assert (tmp_path / 'test1/index.rst').exists() + + # copy directories + destdir = tmp_path / 'test2' + copy_asset(source, destdir, context={'var1': 'bar', 'var2': 'baz'}, renderer=renderer) + assert (destdir / 'index.rst').exists() + assert (destdir / 'foo.rst').exists() + assert (destdir / 'foo.rst').read_text(encoding='utf8') == 'bar.rst' + assert (destdir / '_static' / 'basic.css').exists() + assert (destdir / '_templates' / 'layout.html').exists() + assert (destdir / '_templates' / 'sidebar.html').exists() + assert (destdir / '_templates' / 
'sidebar.html').read_text(encoding='utf8') == 'sidebar: baz' + + # copy with exclusion + def excluded(path): + return ('sidebar.html' in path or 'basic.css' in path) + + destdir = tmp_path / 'test3' + copy_asset(source, destdir, excluded, + context={'var1': 'bar', 'var2': 'baz'}, renderer=renderer) + assert (destdir / 'index.rst').exists() + assert (destdir / 'foo.rst').exists() + assert not (destdir / '_static' / 'basic.css').exists() + assert (destdir / '_templates' / 'layout.html').exists() + assert not (destdir / '_templates' / 'sidebar.html').exists() diff --git a/tests/test_util_i18n.py b/tests/test_util_i18n.py new file mode 100644 index 0000000..9a1ecc5 --- /dev/null +++ b/tests/test_util_i18n.py @@ -0,0 +1,190 @@ +"""Test i18n util.""" + +import datetime +import os + +import babel +import pytest +from babel.messages.mofile import read_mo + +from sphinx.errors import SphinxError +from sphinx.util import i18n + +BABEL_VERSION = tuple(map(int, babel.__version__.split('.'))) + + +def test_catalog_info_for_file_and_path(): + cat = i18n.CatalogInfo('path', 'domain', 'utf-8') + assert cat.po_file == 'domain.po' + assert cat.mo_file == 'domain.mo' + assert cat.po_path == os.path.join('path', 'domain.po') + assert cat.mo_path == os.path.join('path', 'domain.mo') + + +def test_catalog_info_for_sub_domain_file_and_path(): + cat = i18n.CatalogInfo('path', 'sub/domain', 'utf-8') + assert cat.po_file == 'sub/domain.po' + assert cat.mo_file == 'sub/domain.mo' + assert cat.po_path == os.path.join('path', 'sub/domain.po') + assert cat.mo_path == os.path.join('path', 'sub/domain.mo') + + +def test_catalog_outdated(tmp_path): + (tmp_path / 'test.po').write_text('#', encoding='utf8') + cat = i18n.CatalogInfo(tmp_path, 'test', 'utf-8') + assert cat.is_outdated() # if mo is not exist + + mo_file = (tmp_path / 'test.mo') + mo_file.write_text('#', encoding='utf8') + assert not cat.is_outdated() # if mo is exist and newer than po + + os.utime(mo_file, (os.stat(mo_file).st_mtime - 10,) * 2) # to be outdate + assert cat.is_outdated() # if mo is exist and older than po + + +def test_catalog_write_mo(tmp_path): + (tmp_path / 'test.po').write_text('#', encoding='utf8') + cat = i18n.CatalogInfo(tmp_path, 'test', 'utf-8') + cat.write_mo('en') + assert os.path.exists(cat.mo_path) + with open(cat.mo_path, 'rb') as f: + assert read_mo(f) is not None + + +def test_format_date(): + date = datetime.date(2016, 2, 7) + + # strftime format + format = '%B %d, %Y' + assert i18n.format_date(format, date=date, language='') == 'February 07, 2016' + assert i18n.format_date(format, date=date, language='unknown') == 'February 07, 2016' + assert i18n.format_date(format, date=date, language='en') == 'February 07, 2016' + assert i18n.format_date(format, date=date, language='ja') == '2月 07, 2016' + assert i18n.format_date(format, date=date, language='de') == 'Februar 07, 2016' + + # raw string + format = 'Mon Mar 28 12:37:08 2016, commit 4367aef' + assert i18n.format_date(format, date=date, language='en') == format + + format = '%B %d, %Y, %H:%M:%S %I %p' + datet = datetime.datetime(2016, 2, 7, 5, 11, 17, 0) # NoQA: DTZ001 + assert i18n.format_date(format, date=datet, language='en') == 'February 07, 2016, 05:11:17 05 AM' + + format = '%B %-d, %Y, %-H:%-M:%-S %-I %p' + assert i18n.format_date(format, date=datet, language='en') == 'February 7, 2016, 5:11:17 5 AM' + format = '%x' + assert i18n.format_date(format, date=datet, language='en') == 'Feb 7, 2016' + format = '%X' + if BABEL_VERSION >= (2, 12): + assert i18n.format_date(format, 
date=datet, language='en') == '5:11:17\u202fAM' + else: + assert i18n.format_date(format, date=datet, language='en') == '5:11:17 AM' + assert i18n.format_date(format, date=date, language='en') == 'Feb 7, 2016' + format = '%c' + if BABEL_VERSION >= (2, 12): + assert i18n.format_date(format, date=datet, language='en') == 'Feb 7, 2016, 5:11:17\u202fAM' + else: + assert i18n.format_date(format, date=datet, language='en') == 'Feb 7, 2016, 5:11:17 AM' + assert i18n.format_date(format, date=date, language='en') == 'Feb 7, 2016' + + # timezone + format = '%Z' + assert i18n.format_date(format, date=datet, language='en') == 'UTC' + format = '%z' + assert i18n.format_date(format, date=datet, language='en') == '+0000' + + +def test_get_filename_for_language(app): + app.env.temp_data['docname'] = 'index' + + # language is en + app.env.config.language = 'en' + assert i18n.get_image_filename_for_language('foo.png', app.env) == 'foo.en.png' + assert i18n.get_image_filename_for_language('foo.bar.png', app.env) == 'foo.bar.en.png' + assert i18n.get_image_filename_for_language('dir/foo.png', app.env) == 'dir/foo.en.png' + assert i18n.get_image_filename_for_language('../foo.png', app.env) == '../foo.en.png' + assert i18n.get_image_filename_for_language('foo', app.env) == 'foo.en' + + # modify figure_language_filename and language is 'en' + app.env.config.language = 'en' + app.env.config.figure_language_filename = 'images/{language}/{root}{ext}' + assert i18n.get_image_filename_for_language('foo.png', app.env) == 'images/en/foo.png' + assert i18n.get_image_filename_for_language( + 'foo.bar.png', app.env) == 'images/en/foo.bar.png' + assert i18n.get_image_filename_for_language( + 'subdir/foo.png', app.env) == 'images/en/subdir/foo.png' + assert i18n.get_image_filename_for_language( + '../foo.png', app.env) == 'images/en/../foo.png' + assert i18n.get_image_filename_for_language('foo', app.env) == 'images/en/foo' + + # new path and basename tokens + app.env.config.language = 'en' + app.env.config.figure_language_filename = '{path}{language}/{basename}{ext}' + assert i18n.get_image_filename_for_language('foo.png', app.env) == 'en/foo.png' + assert i18n.get_image_filename_for_language( + 'foo.bar.png', app.env) == 'en/foo.bar.png' + assert i18n.get_image_filename_for_language( + 'subdir/foo.png', app.env) == 'subdir/en/foo.png' + assert i18n.get_image_filename_for_language( + '../foo.png', app.env) == '../en/foo.png' + assert i18n.get_image_filename_for_language('foo', app.env) == 'en/foo' + + # invalid figure_language_filename + app.env.config.figure_language_filename = '{root}.{invalid}{ext}' + with pytest.raises(SphinxError): + i18n.get_image_filename_for_language('foo.png', app.env) + + # docpath (for a document in the top of source directory) + app.env.config.language = 'en' + app.env.config.figure_language_filename = '/{docpath}{language}/{basename}{ext}' + assert (i18n.get_image_filename_for_language('foo.png', app.env) == + '/en/foo.png') + + # docpath (for a document in the sub directory) + app.env.temp_data['docname'] = 'subdir/index' + assert (i18n.get_image_filename_for_language('foo.png', app.env) == + '/subdir/en/foo.png') + + +def test_CatalogRepository(tmp_path): + (tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES').mkdir(parents=True, exist_ok=True) + (tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#', encoding='utf8') + (tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#', encoding='utf8') + (tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').mkdir(parents=True, 
exist_ok=True) + (tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test3.po').write_text('#', encoding='utf8') + (tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#', encoding='utf8') + (tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / '.dotdir').mkdir(parents=True, exist_ok=True) + (tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / '.dotdir' / 'test5.po').write_text('#', encoding='utf8') + (tmp_path / 'loc1' / 'yy' / 'LC_MESSAGES').mkdir(parents=True, exist_ok=True) + (tmp_path / 'loc1' / 'yy' / 'LC_MESSAGES' / 'test6.po').write_text('#', encoding='utf8') + (tmp_path / 'loc2' / 'xx' / 'LC_MESSAGES').mkdir(parents=True, exist_ok=True) + (tmp_path / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#', encoding='utf8') + (tmp_path / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test7.po').write_text('#', encoding='utf8') + + # for language xx + repo = i18n.CatalogRepository(tmp_path, ['loc1', 'loc2'], 'xx', 'utf-8') + assert list(repo.locale_dirs) == [str(tmp_path / 'loc1'), + str(tmp_path / 'loc2')] + assert all(isinstance(c, i18n.CatalogInfo) for c in repo.catalogs) + assert sorted(c.domain for c in repo.catalogs) == ['sub/test3', 'sub/test4', + 'test1', 'test1', 'test2', 'test7'] + + # for language yy + repo = i18n.CatalogRepository(tmp_path, ['loc1', 'loc2'], 'yy', 'utf-8') + assert sorted(c.domain for c in repo.catalogs) == ['test6'] + + # unknown languages + repo = i18n.CatalogRepository(tmp_path, ['loc1', 'loc2'], 'zz', 'utf-8') + assert sorted(c.domain for c in repo.catalogs) == [] + + # no languages + repo = i18n.CatalogRepository(tmp_path, ['loc1', 'loc2'], None, 'utf-8') + assert sorted(c.domain for c in repo.catalogs) == [] + + # unknown locale_dirs + repo = i18n.CatalogRepository(tmp_path, ['loc3'], None, 'utf-8') + assert sorted(c.domain for c in repo.catalogs) == [] + + # no locale_dirs + repo = i18n.CatalogRepository(tmp_path, [], None, 'utf-8') + assert sorted(c.domain for c in repo.catalogs) == [] diff --git a/tests/test_util_images.py b/tests/test_util_images.py new file mode 100644 index 0000000..15853c7 --- /dev/null +++ b/tests/test_util_images.py @@ -0,0 +1,74 @@ +"""Test images util.""" + +import pytest + +from sphinx.util.images import ( + get_image_extension, + get_image_size, + guess_mimetype, + parse_data_uri, +) + +GIF_FILENAME = 'img.gif' +PNG_FILENAME = 'img.png' +PDF_FILENAME = 'img.pdf' +TXT_FILENAME = 'index.txt' + + +def test_get_image_size(rootdir): + assert get_image_size(rootdir / 'test-root' / GIF_FILENAME) == (200, 181) + assert get_image_size(rootdir / 'test-root' / PNG_FILENAME) == (200, 181) + assert get_image_size(rootdir / 'test-root' / PDF_FILENAME) is None + assert get_image_size(rootdir / 'test-root' / TXT_FILENAME) is None + + +@pytest.mark.filterwarnings('ignore:The content argument') +def test_guess_mimetype(): + # guess by filename + assert guess_mimetype('img.png') == 'image/png' + assert guess_mimetype('img.jpg') == 'image/jpeg' + assert guess_mimetype('img.txt') is None + assert guess_mimetype('img.txt', default='text/plain') == 'text/plain' + assert guess_mimetype('no_extension') is None + assert guess_mimetype('IMG.PNG') == 'image/png' + + # default parameter is used when no extension + assert guess_mimetype('img.png', 'text/plain') == 'image/png' + assert guess_mimetype('no_extension', 'text/plain') == 'text/plain' + + +def test_get_image_extension(): + assert get_image_extension('image/png') == '.png' + assert get_image_extension('image/jpeg') == '.jpg' + assert get_image_extension('image/svg+xml') == '.svg' + assert 
get_image_extension('text/plain') is None + + +def test_parse_data_uri(): + # standard case + uri = ("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4" + "//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==") + image = parse_data_uri(uri) + assert image is not None + assert image.mimetype == 'image/png' + assert image.charset == 'US-ASCII' + + # no mimetype + uri = ("data:charset=utf-8,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElE" + "QVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==") + image = parse_data_uri(uri) + assert image is not None + assert image.mimetype == 'text/plain' + assert image.charset == 'utf-8' + + # non data URI + uri = ("image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4" + "//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==") + image = parse_data_uri(uri) + assert image is None + + # invalid data URI (no properties) + uri = ("data:iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4" + "//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==") + with pytest.raises(ValueError, match=r'not enough values to unpack \(expected 2, got 1\)'): + parse_data_uri(uri) diff --git a/tests/test_util_inspect.py b/tests/test_util_inspect.py new file mode 100644 index 0000000..73f9656 --- /dev/null +++ b/tests/test_util_inspect.py @@ -0,0 +1,869 @@ +"""Tests util.inspect functions.""" + +from __future__ import annotations + +import ast +import datetime +import enum +import functools +import sys +import types +from inspect import Parameter +from typing import Callable, List, Optional, Union # NoQA: UP035 + +import pytest + +from sphinx.util import inspect +from sphinx.util.inspect import TypeAliasForwardRef, TypeAliasNamespace, stringify_signature +from sphinx.util.typing import stringify_annotation + + +class Base: + def meth(self): + pass + + @staticmethod + def staticmeth(): + pass + + @classmethod + def classmeth(cls): + pass + + @property + def prop(self): + pass + + partialmeth = functools.partialmethod(meth) + + async def coroutinemeth(self): + pass + + partial_coroutinemeth = functools.partialmethod(coroutinemeth) + + @classmethod + async def coroutineclassmeth(cls): + """A documented coroutine classmethod""" + pass + + +class Inherited(Base): + pass + + +def func(): + pass + + +async def coroutinefunc(): + pass + + +async def asyncgenerator(): + yield + +partial_func = functools.partial(func) +partial_coroutinefunc = functools.partial(coroutinefunc) + +builtin_func = print +partial_builtin_func = functools.partial(print) + + +class Descriptor: + def __get__(self, obj, typ=None): + pass + + +class _Callable: + def __call__(self): + pass + + +def _decorator(f): + @functools.wraps(f) + def wrapper(): + return f() + return wrapper + + +def test_TypeAliasForwardRef(): + alias = TypeAliasForwardRef('example') + assert stringify_annotation(alias, 'fully-qualified-except-typing') == 'example' + + alias = Optional[alias] + assert stringify_annotation(alias, 'fully-qualified-except-typing') == 'example | None' + + +def test_TypeAliasNamespace(): + import logging.config + type_alias = TypeAliasNamespace({'logging.Filter': 'MyFilter', + 'logging.Handler': 'MyHandler', + 'logging.handlers.SyslogHandler': 'MySyslogHandler'}) + + assert type_alias['logging'].Filter == 'MyFilter' + assert type_alias['logging'].Handler == 'MyHandler' + assert type_alias['logging'].handlers.SyslogHandler == 'MySyslogHandler' + assert type_alias['logging'].Logger == logging.Logger + assert 
type_alias['logging'].config == logging.config + + with pytest.raises(KeyError): + assert type_alias['log'] + + with pytest.raises(KeyError): + assert type_alias['unknown'] + + +def test_signature(): + # literals + with pytest.raises(TypeError): + inspect.signature(1) + + with pytest.raises(TypeError): + inspect.signature('') + + # builtins are supported on a case-by-case basis, depending on whether + # they define __text_signature__ + if getattr(list, '__text_signature__', None): + sig = inspect.stringify_signature(inspect.signature(list)) + assert sig == '(iterable=(), /)' + else: + with pytest.raises(ValueError, match='no signature found for builtin type'): + inspect.signature(list) + with pytest.raises(ValueError, match='no signature found for builtin type'): + inspect.signature(range) + + # normal function + def func(a, b, c=1, d=2, *e, **f): + pass + + sig = inspect.stringify_signature(inspect.signature(func)) + assert sig == '(a, b, c=1, d=2, *e, **f)' + + +def test_signature_partial(): + def fun(a, b, c=1, d=2): + pass + p = functools.partial(fun, 10, c=11) + + sig = inspect.signature(p) + assert stringify_signature(sig) == '(b, *, c=11, d=2)' + + +def test_signature_methods(): + class Foo: + def meth1(self, arg1, **kwargs): + pass + + @classmethod + def meth2(cls, arg1, *args, **kwargs): + pass + + @staticmethod + def meth3(arg1, *args, **kwargs): + pass + + @functools.wraps(Foo().meth1) + def wrapped_bound_method(*args, **kwargs): + pass + + # unbound method + sig = inspect.signature(Foo.meth1) + assert stringify_signature(sig) == '(self, arg1, **kwargs)' + + sig = inspect.signature(Foo.meth1, bound_method=True) + assert stringify_signature(sig) == '(arg1, **kwargs)' + + # bound method + sig = inspect.signature(Foo().meth1) + assert stringify_signature(sig) == '(arg1, **kwargs)' + + # class method + sig = inspect.signature(Foo.meth2) + assert stringify_signature(sig) == '(arg1, *args, **kwargs)' + + sig = inspect.signature(Foo().meth2) + assert stringify_signature(sig) == '(arg1, *args, **kwargs)' + + # static method + sig = inspect.signature(Foo.meth3) + assert stringify_signature(sig) == '(arg1, *args, **kwargs)' + + sig = inspect.signature(Foo().meth3) + assert stringify_signature(sig) == '(arg1, *args, **kwargs)' + + # wrapped bound method + sig = inspect.signature(wrapped_bound_method) + assert stringify_signature(sig) == '(arg1, **kwargs)' + + +def test_signature_partialmethod(): + from functools import partialmethod + + class Foo: + def meth1(self, arg1, arg2, arg3=None, arg4=None): + pass + + def meth2(self, arg1, arg2): + pass + + foo = partialmethod(meth1, 1, 2) + bar = partialmethod(meth1, 1, arg3=3) + baz = partialmethod(meth2, 1, 2) + + subject = Foo() + sig = inspect.signature(subject.foo) + assert stringify_signature(sig) == '(arg3=None, arg4=None)' + + sig = inspect.signature(subject.bar) + assert stringify_signature(sig) == '(arg2, *, arg3=3, arg4=None)' + + sig = inspect.signature(subject.baz) + assert stringify_signature(sig) == '()' + + +def test_signature_annotations(): + from .typing_test_data import ( + Node, + f0, + f1, + f2, + f3, + f4, + f5, + f6, + f7, + f8, + f9, + f10, + f11, + f12, + f13, + f14, + f15, + f16, + f17, + f18, + f19, + f20, + f21, + f22, + f23, + f24, + f25, + ) + + # Class annotations + sig = inspect.signature(f0) + assert stringify_signature(sig) == '(x: int, y: numbers.Integral) -> None' + + # Generic types with concrete parameters + sig = inspect.signature(f1) + assert stringify_signature(sig) == '(x: list[int]) -> typing.List[int]' 
+ + # TypeVars and generic types with TypeVars + sig = inspect.signature(f2) + assert stringify_signature(sig) == ('(x: typing.List[tests.typing_test_data.T],' + ' y: typing.List[tests.typing_test_data.T_co],' + ' z: tests.typing_test_data.T' + ') -> typing.List[tests.typing_test_data.T_contra]') + + # Union types + sig = inspect.signature(f3) + assert stringify_signature(sig) == '(x: str | numbers.Integral) -> None' + + # Quoted annotations + sig = inspect.signature(f4) + assert stringify_signature(sig) == '(x: str, y: str) -> None' + + # Keyword-only arguments + sig = inspect.signature(f5) + assert stringify_signature(sig) == '(x: int, *, y: str, z: str) -> None' + + # Keyword-only arguments with varargs + sig = inspect.signature(f6) + assert stringify_signature(sig) == '(x: int, *args, y: str, z: str) -> None' + + # Space around '=' for defaults + sig = inspect.signature(f7) + if sys.version_info[:2] <= (3, 10): + assert stringify_signature(sig) == '(x: int | None = None, y: dict = {}) -> None' + else: + assert stringify_signature(sig) == '(x: int = None, y: dict = {}) -> None' + + # Callable types + sig = inspect.signature(f8) + assert stringify_signature(sig) == '(x: typing.Callable[[int, str], int]) -> None' + + sig = inspect.signature(f9) + assert stringify_signature(sig) == '(x: typing.Callable) -> None' + + # Tuple types + sig = inspect.signature(f10) + assert stringify_signature(sig) == '(x: typing.Tuple[int, str], y: typing.Tuple[int, ...]) -> None' + + # Instance annotations + sig = inspect.signature(f11) + assert stringify_signature(sig) == '(x: CustomAnnotation, y: 123) -> None' + + # tuple with more than two items + sig = inspect.signature(f12) + assert stringify_signature(sig) == '() -> typing.Tuple[int, str, int]' + + # optional + sig = inspect.signature(f13) + assert stringify_signature(sig) == '() -> str | None' + + # optional union + sig = inspect.signature(f20) + assert stringify_signature(sig) in ('() -> int | str | None', + '() -> str | int | None') + + # Any + sig = inspect.signature(f14) + assert stringify_signature(sig) == '() -> typing.Any' + + # ForwardRef + sig = inspect.signature(f15) + assert stringify_signature(sig) == '(x: Unknown, y: int) -> typing.Any' + + # keyword only arguments (1) + sig = inspect.signature(f16) + assert stringify_signature(sig) == '(arg1, arg2, *, arg3=None, arg4=None)' + + # keyword only arguments (2) + sig = inspect.signature(f17) + assert stringify_signature(sig) == '(*, arg3, arg4)' + + sig = inspect.signature(f18) + assert stringify_signature(sig) == ('(self, arg1: int | typing.Tuple = 10) -> ' + 'typing.List[typing.Dict]') + + # annotations for variadic and keyword parameters + sig = inspect.signature(f19) + assert stringify_signature(sig) == '(*args: int, **kwargs: str)' + + # default value is inspect.Signature.empty + sig = inspect.signature(f21) + assert stringify_signature(sig) == "(arg1='whatever', arg2)" + + # type hints by string + sig = inspect.signature(Node.children) + assert stringify_signature(sig) == '(self) -> typing.List[tests.typing_test_data.Node]' + + sig = inspect.signature(Node.__init__) + assert stringify_signature(sig) == '(self, parent: tests.typing_test_data.Node | None) -> None' + + # show_annotation is False + sig = inspect.signature(f7) + assert stringify_signature(sig, show_annotation=False) == '(x=None, y={})' + + # show_return_annotation is False + sig = inspect.signature(f7) + if sys.version_info[:2] <= (3, 10): + assert stringify_signature(sig, show_return_annotation=False) == '(x: int | None = 
None, y: dict = {})' + else: + assert stringify_signature(sig, show_return_annotation=False) == '(x: int = None, y: dict = {})' + + # unqualified_typehints is True + sig = inspect.signature(f7) + if sys.version_info[:2] <= (3, 10): + assert stringify_signature(sig, unqualified_typehints=True) == '(x: int | None = None, y: dict = {}) -> None' + else: + assert stringify_signature(sig, unqualified_typehints=True) == '(x: int = None, y: dict = {}) -> None' + + # case: separator at head + sig = inspect.signature(f22) + assert stringify_signature(sig) == '(*, a, b)' + + # case: separator in the middle + sig = inspect.signature(f23) + assert stringify_signature(sig) == '(a, b, /, c, d)' + + sig = inspect.signature(f24) + assert stringify_signature(sig) == '(a, /, *, b)' + + # case: separator at tail + sig = inspect.signature(f25) + assert stringify_signature(sig) == '(a, b, /)' + + +def test_signature_from_str_basic(): + signature = '(a, b, *args, c=0, d="blah", **kwargs)' + sig = inspect.signature_from_str(signature) + assert list(sig.parameters.keys()) == ['a', 'b', 'args', 'c', 'd', 'kwargs'] + assert sig.parameters['a'].name == 'a' + assert sig.parameters['a'].kind == Parameter.POSITIONAL_OR_KEYWORD + assert sig.parameters['a'].default == Parameter.empty + assert sig.parameters['a'].annotation == Parameter.empty + assert sig.parameters['b'].name == 'b' + assert sig.parameters['b'].kind == Parameter.POSITIONAL_OR_KEYWORD + assert sig.parameters['b'].default == Parameter.empty + assert sig.parameters['b'].annotation == Parameter.empty + assert sig.parameters['args'].name == 'args' + assert sig.parameters['args'].kind == Parameter.VAR_POSITIONAL + assert sig.parameters['args'].default == Parameter.empty + assert sig.parameters['args'].annotation == Parameter.empty + assert sig.parameters['c'].name == 'c' + assert sig.parameters['c'].kind == Parameter.KEYWORD_ONLY + assert sig.parameters['c'].default == '0' + assert sig.parameters['c'].annotation == Parameter.empty + assert sig.parameters['d'].name == 'd' + assert sig.parameters['d'].kind == Parameter.KEYWORD_ONLY + assert sig.parameters['d'].default == "'blah'" + assert sig.parameters['d'].annotation == Parameter.empty + assert sig.parameters['kwargs'].name == 'kwargs' + assert sig.parameters['kwargs'].kind == Parameter.VAR_KEYWORD + assert sig.parameters['kwargs'].default == Parameter.empty + assert sig.parameters['kwargs'].annotation == Parameter.empty + assert sig.return_annotation == Parameter.empty + + +def test_signature_from_str_default_values(): + signature = ('(a=0, b=0.0, c="str", d=b"bytes", e=..., f=True, ' + 'g=[1, 2, 3], h={"a": 1}, i={1, 2, 3}, ' + 'j=lambda x, y: None, k=None, l=object(), m=foo.bar.CONSTANT)') + sig = inspect.signature_from_str(signature) + assert sig.parameters['a'].default == '0' + assert sig.parameters['b'].default == '0.0' + assert sig.parameters['c'].default == "'str'" + assert sig.parameters['d'].default == "b'bytes'" + assert sig.parameters['e'].default == '...' + assert sig.parameters['f'].default == 'True' + assert sig.parameters['g'].default == '[1, 2, 3]' + assert sig.parameters['h'].default == "{'a': 1}" + assert sig.parameters['i'].default == '{1, 2, 3}' + assert sig.parameters['j'].default == 'lambda x, y: ...' 
+ assert sig.parameters['k'].default == 'None' + assert sig.parameters['l'].default == 'object()' + assert sig.parameters['m'].default == 'foo.bar.CONSTANT' + + +def test_signature_from_str_annotations(): + signature = '(a: int, *args: bytes, b: str = "blah", **kwargs: float) -> None' + sig = inspect.signature_from_str(signature) + assert list(sig.parameters.keys()) == ['a', 'args', 'b', 'kwargs'] + assert sig.parameters['a'].annotation == "int" + assert sig.parameters['args'].annotation == "bytes" + assert sig.parameters['b'].annotation == "str" + assert sig.parameters['kwargs'].annotation == "float" + assert sig.return_annotation == 'None' + + +def test_signature_from_str_complex_annotations(): + sig = inspect.signature_from_str('() -> Tuple[str, int, ...]') + assert sig.return_annotation == 'Tuple[str, int, ...]' + + sig = inspect.signature_from_str('() -> Callable[[int, int], int]') + assert sig.return_annotation == 'Callable[[int, int], int]' + + +def test_signature_from_str_kwonly_args(): + sig = inspect.signature_from_str('(a, *, b)') + assert list(sig.parameters.keys()) == ['a', 'b'] + assert sig.parameters['a'].kind == Parameter.POSITIONAL_OR_KEYWORD + assert sig.parameters['a'].default == Parameter.empty + assert sig.parameters['b'].kind == Parameter.KEYWORD_ONLY + assert sig.parameters['b'].default == Parameter.empty + + +def test_signature_from_str_positionaly_only_args(): + sig = inspect.signature_from_str('(a, b=0, /, c=1)') + assert list(sig.parameters.keys()) == ['a', 'b', 'c'] + assert sig.parameters['a'].kind == Parameter.POSITIONAL_ONLY + assert sig.parameters['a'].default == Parameter.empty + assert sig.parameters['b'].kind == Parameter.POSITIONAL_ONLY + assert sig.parameters['b'].default == '0' + assert sig.parameters['c'].kind == Parameter.POSITIONAL_OR_KEYWORD + assert sig.parameters['c'].default == '1' + + +def test_signature_from_str_invalid(): + with pytest.raises(SyntaxError): + inspect.signature_from_str('') + + +def test_signature_from_ast(): + signature = 'def func(a, b, *args, c=0, d="blah", **kwargs): pass' + tree = ast.parse(signature) + sig = inspect.signature_from_ast(tree.body[0]) + assert list(sig.parameters.keys()) == ['a', 'b', 'args', 'c', 'd', 'kwargs'] + assert sig.parameters['a'].name == 'a' + assert sig.parameters['a'].kind == Parameter.POSITIONAL_OR_KEYWORD + assert sig.parameters['a'].default == Parameter.empty + assert sig.parameters['a'].annotation == Parameter.empty + assert sig.parameters['b'].name == 'b' + assert sig.parameters['b'].kind == Parameter.POSITIONAL_OR_KEYWORD + assert sig.parameters['b'].default == Parameter.empty + assert sig.parameters['b'].annotation == Parameter.empty + assert sig.parameters['args'].name == 'args' + assert sig.parameters['args'].kind == Parameter.VAR_POSITIONAL + assert sig.parameters['args'].default == Parameter.empty + assert sig.parameters['args'].annotation == Parameter.empty + assert sig.parameters['c'].name == 'c' + assert sig.parameters['c'].kind == Parameter.KEYWORD_ONLY + assert sig.parameters['c'].default == '0' + assert sig.parameters['c'].annotation == Parameter.empty + assert sig.parameters['d'].name == 'd' + assert sig.parameters['d'].kind == Parameter.KEYWORD_ONLY + assert sig.parameters['d'].default == "'blah'" + assert sig.parameters['d'].annotation == Parameter.empty + assert sig.parameters['kwargs'].name == 'kwargs' + assert sig.parameters['kwargs'].kind == Parameter.VAR_KEYWORD + assert sig.parameters['kwargs'].default == Parameter.empty + assert sig.parameters['kwargs'].annotation == 
Parameter.empty + assert sig.return_annotation == Parameter.empty + + +def test_safe_getattr_with_default(): + class Foo: + def __getattr__(self, item): + raise Exception + + obj = Foo() + + result = inspect.safe_getattr(obj, 'bar', 'baz') + + assert result == 'baz' + + +def test_safe_getattr_with_exception(): + class Foo: + def __getattr__(self, item): + raise Exception + + obj = Foo() + + with pytest.raises(AttributeError, match='bar'): + inspect.safe_getattr(obj, 'bar') + + +def test_safe_getattr_with_property_exception(): + class Foo: + @property + def bar(self): + raise Exception + + obj = Foo() + + with pytest.raises(AttributeError, match='bar'): + inspect.safe_getattr(obj, 'bar') + + +def test_safe_getattr_with___dict___override(): + class Foo: + @property + def __dict__(self): + raise Exception + + obj = Foo() + + with pytest.raises(AttributeError, match='bar'): + inspect.safe_getattr(obj, 'bar') + + +def test_dictionary_sorting(): + dictionary = {"c": 3, "a": 1, "d": 2, "b": 4} + description = inspect.object_description(dictionary) + assert description == "{'a': 1, 'b': 4, 'c': 3, 'd': 2}" + + +def test_set_sorting(): + set_ = set("gfedcba") + description = inspect.object_description(set_) + assert description == "{'a', 'b', 'c', 'd', 'e', 'f', 'g'}" + + +def test_set_sorting_enum(): + class MyEnum(enum.Enum): + a = 1 + b = 2 + c = 3 + + set_ = set(MyEnum) + description = inspect.object_description(set_) + assert description == "{MyEnum.a, MyEnum.b, MyEnum.c}" + + +def test_set_sorting_fallback(): + set_ = {None, 1} + description = inspect.object_description(set_) + assert description == "{1, None}" + + +def test_deterministic_nested_collection_descriptions(): + # sortable + assert inspect.object_description([{1, 2, 3, 10}]) == "[{1, 2, 3, 10}]" + assert inspect.object_description(({1, 2, 3, 10},)) == "({1, 2, 3, 10},)" + # non-sortable (elements of varying datatype) + assert inspect.object_description([{None, 1}]) == "[{1, None}]" + assert inspect.object_description(({None, 1},)) == "({1, None},)" + assert inspect.object_description([{None, 1, 'A'}]) == "[{'A', 1, None}]" + assert inspect.object_description(({None, 1, 'A'},)) == "({'A', 1, None},)" + + +def test_frozenset_sorting(): + frozenset_ = frozenset("gfedcba") + description = inspect.object_description(frozenset_) + assert description == "frozenset({'a', 'b', 'c', 'd', 'e', 'f', 'g'})" + + +def test_frozenset_sorting_fallback(): + frozenset_ = frozenset((None, 1)) + description = inspect.object_description(frozenset_) + assert description == "frozenset({1, None})" + + +def test_nested_tuple_sorting(): + tuple_ = ({"c", "b", "a"},) # nb. 
trailing comma + description = inspect.object_description(tuple_) + assert description == "({'a', 'b', 'c'},)" + + tuple_ = ({"c", "b", "a"}, {"f", "e", "d"}) + description = inspect.object_description(tuple_) + assert description == "({'a', 'b', 'c'}, {'d', 'e', 'f'})" + + +def test_recursive_collection_description(): + dict_a_, dict_b_ = {"a": 1}, {"b": 2} + dict_a_["link"], dict_b_["link"] = dict_b_, dict_a_ + description_a, description_b = ( + inspect.object_description(dict_a_), + inspect.object_description(dict_b_), + ) + assert description_a == "{'a': 1, 'link': {'b': 2, 'link': dict(...)}}" + assert description_b == "{'b': 2, 'link': {'a': 1, 'link': dict(...)}}" + + list_c_, list_d_ = [1, 2, 3, 4], [5, 6, 7, 8] + list_c_.append(list_d_) + list_d_.append(list_c_) + description_c, description_d = ( + inspect.object_description(list_c_), + inspect.object_description(list_d_), + ) + + assert description_c == "[1, 2, 3, 4, [5, 6, 7, 8, list(...)]]" + assert description_d == "[5, 6, 7, 8, [1, 2, 3, 4, list(...)]]" + + +def test_dict_customtype(): + class CustomType: + def __init__(self, value): + self._value = value + + def __repr__(self): + return "<CustomType(%r)>" % self._value + + dictionary = {CustomType(2): 2, CustomType(1): 1} + description = inspect.object_description(dictionary) + # Type is unsortable, just check that it does not crash + assert "<CustomType(2)>: 2" in description + + +def test_object_description_enum(): + class MyEnum(enum.Enum): + FOO = 1 + BAR = 2 + + assert inspect.object_description(MyEnum.FOO) == "MyEnum.FOO" + + +def test_getslots(): + class Foo: + pass + + class Bar: + __slots__ = ['attr'] + + class Baz: + __slots__ = {'attr': 'docstring'} + + class Qux: + __slots__ = 'attr' + + assert inspect.getslots(Foo) is None + assert inspect.getslots(Bar) == {'attr': None} + assert inspect.getslots(Baz) == {'attr': 'docstring'} + assert inspect.getslots(Qux) == {'attr': None} + + with pytest.raises(TypeError): + inspect.getslots(Bar()) + + +def test_isclassmethod(): + assert inspect.isclassmethod(Base.classmeth) is True + assert inspect.isclassmethod(Base.meth) is False + assert inspect.isclassmethod(Inherited.classmeth) is True + assert inspect.isclassmethod(Inherited.meth) is False + + +def test_isstaticmethod(): + assert inspect.isstaticmethod(Base.staticmeth, Base, 'staticmeth') is True + assert inspect.isstaticmethod(Base.meth, Base, 'meth') is False + assert inspect.isstaticmethod(Inherited.staticmeth, Inherited, 'staticmeth') is True + assert inspect.isstaticmethod(Inherited.meth, Inherited, 'meth') is False + + +def test_iscoroutinefunction(): + assert inspect.iscoroutinefunction(func) is False # function + assert inspect.iscoroutinefunction(coroutinefunc) is True # coroutine + assert inspect.iscoroutinefunction(partial_coroutinefunc) is True # partial-ed coroutine + assert inspect.iscoroutinefunction(Base.meth) is False # method + assert inspect.iscoroutinefunction(Base.coroutinemeth) is True # coroutine-method + assert inspect.iscoroutinefunction(Base.__dict__["coroutineclassmeth"]) is True # coroutine classmethod + + # partial-ed coroutine-method + partial_coroutinemeth = Base.__dict__['partial_coroutinemeth'] + assert inspect.iscoroutinefunction(partial_coroutinemeth) is True + + +def test_iscoroutinefunction_wrapped(): + # function wrapping a callable obj + assert inspect.isfunction(_decorator(coroutinefunc)) is True + + +def test_isfunction(): + assert inspect.isfunction(func) is True # function + assert inspect.isfunction(partial_func) is True # 
partial-ed function + assert inspect.isfunction(Base.meth) is True # method of class + assert inspect.isfunction(Base.partialmeth) is True # partial-ed method of class + assert inspect.isfunction(Base().meth) is False # method of instance + assert inspect.isfunction(builtin_func) is False # builtin function + assert inspect.isfunction(partial_builtin_func) is False # partial-ed builtin function + + +def test_isfunction_wrapped(): + # function wrapping a callable obj + assert inspect.isfunction(_decorator(_Callable())) is True + + +def test_isbuiltin(): + assert inspect.isbuiltin(builtin_func) is True # builtin function + assert inspect.isbuiltin(partial_builtin_func) is True # partial-ed builtin function + assert inspect.isbuiltin(func) is False # function + assert inspect.isbuiltin(partial_func) is False # partial-ed function + assert inspect.isbuiltin(Base.meth) is False # method of class + assert inspect.isbuiltin(Base().meth) is False # method of instance + + +def test_isdescriptor(): + assert inspect.isdescriptor(Base.prop) is True # property of class + assert inspect.isdescriptor(Base().prop) is False # property of instance + assert inspect.isdescriptor(Base.meth) is True # method of class + assert inspect.isdescriptor(Base().meth) is True # method of instance + assert inspect.isdescriptor(func) is True # function + + +def test_isattributedescriptor(): + assert inspect.isattributedescriptor(Base.prop) is True # property + assert inspect.isattributedescriptor(Base.meth) is False # method + assert inspect.isattributedescriptor(Base.staticmeth) is False # staticmethod + assert inspect.isattributedescriptor(Base.classmeth) is False # classmetho + assert inspect.isattributedescriptor(Descriptor) is False # custom descriptor class + assert inspect.isattributedescriptor(str.join) is False # MethodDescriptorType + assert inspect.isattributedescriptor(object.__init__) is False # WrapperDescriptorType + assert inspect.isattributedescriptor(dict.__dict__['fromkeys']) is False # ClassMethodDescriptorType + assert inspect.isattributedescriptor(types.FrameType.f_locals) is True # GetSetDescriptorType + assert inspect.isattributedescriptor(datetime.timedelta.days) is True # MemberDescriptorType + + try: + # _testcapi module cannot be importable in some distro + # refs: https://github.com/sphinx-doc/sphinx/issues/9868 + import _testcapi + + testinstancemethod = _testcapi.instancemethod(str.__repr__) + assert inspect.isattributedescriptor(testinstancemethod) is False # instancemethod (C-API) + except ImportError: + pass + + +def test_isproperty(): + assert inspect.isproperty(Base.prop) is True # property of class + assert inspect.isproperty(Base().prop) is False # property of instance + assert inspect.isproperty(Base.meth) is False # method of class + assert inspect.isproperty(Base().meth) is False # method of instance + assert inspect.isproperty(func) is False # function + + +def test_isgenericalias(): + #: A list of int + T = List[int] # NoQA: UP006 + S = list[Union[str, None]] + + C = Callable[[int], None] # a generic alias not having a doccomment + + assert inspect.isgenericalias(C) is True + assert inspect.isgenericalias(Callable) is True + assert inspect.isgenericalias(T) is True + assert inspect.isgenericalias(List) is True # NoQA: UP006 + assert inspect.isgenericalias(S) is True + assert inspect.isgenericalias(list) is False + assert inspect.isgenericalias([]) is False + assert inspect.isgenericalias(object()) is False + assert inspect.isgenericalias(Base) is False + + +def test_unpartial(): 
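+    # unpartial() should unwrap even nested functools.partial objects back to the original function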
+ def func1(a, b, c): + pass + + func2 = functools.partial(func1, 1) + func2.__doc__ = "func2" + func3 = functools.partial(func2, 2) # nested partial object + + assert inspect.unpartial(func2) is func1 + assert inspect.unpartial(func3) is func1 + + +def test_getdoc_inherited_classmethod(): + class Foo: + @classmethod + def meth(self): + """ + docstring + indented text + """ + + class Bar(Foo): + @classmethod + def meth(self): + # inherited classmethod + pass + + assert inspect.getdoc(Bar.meth, getattr, False, Bar, "meth") is None + assert inspect.getdoc(Bar.meth, getattr, True, Bar, "meth") == Foo.meth.__doc__ + + +def test_getdoc_inherited_decorated_method(): + class Foo: + def meth(self): + """ + docstring + indented text + """ + + class Bar(Foo): + @functools.lru_cache # noqa: B019 + def meth(self): + # inherited and decorated method + pass + + assert inspect.getdoc(Bar.meth, getattr, False, Bar, "meth") is None + assert inspect.getdoc(Bar.meth, getattr, True, Bar, "meth") == Foo.meth.__doc__ + + +def test_is_builtin_class_method(): + class MyInt(int): + def my_method(self): + pass + + assert inspect.is_builtin_class_method(MyInt, 'to_bytes') + assert inspect.is_builtin_class_method(MyInt, '__init__') + assert not inspect.is_builtin_class_method(MyInt, 'my_method') + assert not inspect.is_builtin_class_method(MyInt, 'does_not_exist') + assert not inspect.is_builtin_class_method(4, 'still does not crash') + + class ObjectWithMroAttr: + def __init__(self, mro_attr): + self.__mro__ = mro_attr + + assert not inspect.is_builtin_class_method(ObjectWithMroAttr([1, 2, 3]), 'still does not crash') diff --git a/tests/test_util_inventory.py b/tests/test_util_inventory.py new file mode 100644 index 0000000..2c20763 --- /dev/null +++ b/tests/test_util_inventory.py @@ -0,0 +1,116 @@ +"""Test inventory util functions.""" +import os +import posixpath +import zlib +from io import BytesIO + +from sphinx.testing.util import SphinxTestApp +from sphinx.util.inventory import InventoryFile + +inventory_v1 = b'''\ +# Sphinx inventory version 1 +# Project: foo +# Version: 1.0 +module mod foo.html +module.cls class foo.html +''' + +inventory_v2 = b'''\ +# Sphinx inventory version 2 +# Project: foo +# Version: 2.0 +# The remainder of this file is compressed with zlib. +''' + zlib.compress(b'''\ +module1 py:module 0 foo.html#module-module1 Long Module desc +module2 py:module 0 foo.html#module-$ - +module1.func py:function 1 sub/foo.html#$ - +module1.Foo.bar py:method 1 index.html#foo.Bar.baz - +CFunc c:function 2 cfunc.html#CFunc - +std cpp:type 1 index.html#std - +std::uint8_t cpp:type 1 index.html#std_uint8_t - +foo::Bar cpp:class 1 index.html#cpp_foo_bar - +foo::Bar::baz cpp:function 1 index.html#cpp_foo_bar_baz - +foons cpp:type 1 index.html#foons - +foons::bartype cpp:type 1 index.html#foons_bartype - +a term std:term -1 glossary.html#term-a-term - +ls.-l std:cmdoption 1 index.html#cmdoption-ls-l - +docname std:doc -1 docname.html - +foo js:module 1 index.html#foo - +foo.bar js:class 1 index.html#foo.bar - +foo.bar.baz js:method 1 index.html#foo.bar.baz - +foo.bar.qux js:data 1 index.html#foo.bar.qux - +a term including:colon std:term -1 glossary.html#term-a-term-including-colon - +''') + +inventory_v2_not_having_version = b'''\ +# Sphinx inventory version 2 +# Project: foo +# Version: +# The remainder of this file is compressed with zlib. 
+''' + zlib.compress(b'''\ +module1 py:module 0 foo.html#module-module1 Long Module desc +''') + + +def test_read_inventory_v1(): + f = BytesIO(inventory_v1) + invdata = InventoryFile.load(f, '/util', posixpath.join) + assert invdata['py:module']['module'] == \ + ('foo', '1.0', '/util/foo.html#module-module', '-') + assert invdata['py:class']['module.cls'] == \ + ('foo', '1.0', '/util/foo.html#module.cls', '-') + + +def test_read_inventory_v2(): + f = BytesIO(inventory_v2) + invdata = InventoryFile.load(f, '/util', posixpath.join) + + assert len(invdata['py:module']) == 2 + assert invdata['py:module']['module1'] == \ + ('foo', '2.0', '/util/foo.html#module-module1', 'Long Module desc') + assert invdata['py:module']['module2'] == \ + ('foo', '2.0', '/util/foo.html#module-module2', '-') + assert invdata['py:function']['module1.func'][2] == \ + '/util/sub/foo.html#module1.func' + assert invdata['c:function']['CFunc'][2] == '/util/cfunc.html#CFunc' + assert invdata['std:term']['a term'][2] == \ + '/util/glossary.html#term-a-term' + assert invdata['std:term']['a term including:colon'][2] == \ + '/util/glossary.html#term-a-term-including-colon' + + +def test_read_inventory_v2_not_having_version(): + f = BytesIO(inventory_v2_not_having_version) + invdata = InventoryFile.load(f, '/util', posixpath.join) + assert invdata['py:module']['module1'] == \ + ('foo', '', '/util/foo.html#module-module1', 'Long Module desc') + + +def _write_appconfig(dir, language, prefix=None): + prefix = prefix or language + os.makedirs(dir / prefix, exist_ok=True) + (dir / prefix / 'conf.py').write_text(f'language = "{language}"', encoding='utf8') + (dir / prefix / 'index.rst').write_text('index.rst', encoding='utf8') + assert sorted(os.listdir(dir / prefix)) == ['conf.py', 'index.rst'] + assert (dir / prefix / 'index.rst').exists() + return dir / prefix + + +def _build_inventory(srcdir): + app = SphinxTestApp(srcdir=srcdir) + app.build() + app.cleanup() + return (app.outdir / 'objects.inv') + + +def test_inventory_localization(tmp_path): + # Build an app using Estonian (EE) locale + srcdir_et = _write_appconfig(tmp_path, "et") + inventory_et = _build_inventory(srcdir_et) + + # Build the same app using English (US) locale + srcdir_en = _write_appconfig(tmp_path, "en") + inventory_en = _build_inventory(srcdir_en) + + # Ensure that the inventory contents differ + assert inventory_et.read_bytes() != inventory_en.read_bytes() diff --git a/tests/test_util_logging.py b/tests/test_util_logging.py new file mode 100644 index 0000000..4d506a8 --- /dev/null +++ b/tests/test_util_logging.py @@ -0,0 +1,398 @@ +"""Test logging util.""" + +import codecs +import os +import os.path + +import pytest +from docutils import nodes + +from sphinx.errors import SphinxWarning +from sphinx.testing.util import strip_escseq +from sphinx.util import logging, osutil +from sphinx.util.console import colorize +from sphinx.util.logging import is_suppressed_warning, prefixed_warnings +from sphinx.util.parallel import ParallelTasks + + +def test_info_and_warning(app, status, warning): + app.verbosity = 2 + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + logger.debug('message1') + logger.info('message2') + logger.warning('message3') + logger.critical('message4') + logger.error('message5') + + assert 'message1' in status.getvalue() + assert 'message2' in status.getvalue() + assert 'message3' not in status.getvalue() + assert 'message4' not in status.getvalue() + assert 'message5' not in status.getvalue() + + assert 'message1' 
not in warning.getvalue() + assert 'message2' not in warning.getvalue() + assert 'WARNING: message3' in warning.getvalue() + assert 'CRITICAL: message4' in warning.getvalue() + assert 'ERROR: message5' in warning.getvalue() + + +def test_Exception(app, status, warning): + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + logger.info(Exception) + assert "<class 'Exception'>" in status.getvalue() + + +def test_verbosity_filter(app, status, warning): + # verbosity = 0: INFO + app.verbosity = 0 + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + logger.info('message1') + logger.verbose('message2') + logger.debug('message3') + + assert 'message1' in status.getvalue() + assert 'message2' not in status.getvalue() + assert 'message3' not in status.getvalue() + assert 'message4' not in status.getvalue() + + # verbosity = 1: VERBOSE + app.verbosity = 1 + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + logger.info('message1') + logger.verbose('message2') + logger.debug('message3') + + assert 'message1' in status.getvalue() + assert 'message2' in status.getvalue() + assert 'message3' not in status.getvalue() + assert 'message4' not in status.getvalue() + + # verbosity = 2: DEBUG + app.verbosity = 2 + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + logger.info('message1') + logger.verbose('message2') + logger.debug('message3') + + assert 'message1' in status.getvalue() + assert 'message2' in status.getvalue() + assert 'message3' in status.getvalue() + assert 'message4' not in status.getvalue() + + +def test_nonl_info_log(app, status, warning): + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + logger.info('message1', nonl=True) + logger.info('message2') + logger.info('message3') + + assert 'message1message2\nmessage3' in status.getvalue() + + +def test_once_warning_log(app, status, warning): + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + logger.warning('message: %d', 1, once=True) + logger.warning('message: %d', 1, once=True) + logger.warning('message: %d', 2, once=True) + + assert 'WARNING: message: 1\nWARNING: message: 2\n' in strip_escseq(warning.getvalue()) + + +def test_is_suppressed_warning(): + suppress_warnings = ["ref", "files.*", "rest.duplicated_labels"] + + assert is_suppressed_warning(None, None, suppress_warnings) is False + assert is_suppressed_warning("ref", None, suppress_warnings) is True + assert is_suppressed_warning("ref", "numref", suppress_warnings) is True + assert is_suppressed_warning("ref", "option", suppress_warnings) is True + assert is_suppressed_warning("files", "image", suppress_warnings) is True + assert is_suppressed_warning("files", "stylesheet", suppress_warnings) is True + assert is_suppressed_warning("rest", None, suppress_warnings) is False + assert is_suppressed_warning("rest", "syntax", suppress_warnings) is False + assert is_suppressed_warning("rest", "duplicated_labels", suppress_warnings) is True + + +def test_suppress_warnings(app, status, warning): + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + app._warncount = 0 # force reset + + app.config.suppress_warnings = [] + warning.truncate(0) + logger.warning('message0', type='test') + logger.warning('message1', type='test', subtype='logging') + logger.warning('message2', type='test', subtype='crash') + logger.warning('message3', type='actual', subtype='logging') + assert 'message0' in 
warning.getvalue() + assert 'message1' in warning.getvalue() + assert 'message2' in warning.getvalue() + assert 'message3' in warning.getvalue() + assert app._warncount == 4 + + app.config.suppress_warnings = ['test'] + warning.truncate(0) + logger.warning('message0', type='test') + logger.warning('message1', type='test', subtype='logging') + logger.warning('message2', type='test', subtype='crash') + logger.warning('message3', type='actual', subtype='logging') + assert 'message0' not in warning.getvalue() + assert 'message1' not in warning.getvalue() + assert 'message2' not in warning.getvalue() + assert 'message3' in warning.getvalue() + assert app._warncount == 5 + + app.config.suppress_warnings = ['test.logging'] + warning.truncate(0) + logger.warning('message0', type='test') + logger.warning('message1', type='test', subtype='logging') + logger.warning('message2', type='test', subtype='crash') + logger.warning('message3', type='actual', subtype='logging') + assert 'message0' in warning.getvalue() + assert 'message1' not in warning.getvalue() + assert 'message2' in warning.getvalue() + assert 'message3' in warning.getvalue() + assert app._warncount == 8 + + +def test_warningiserror(app, status, warning): + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + # if False, warning is not error + app.warningiserror = False + logger.warning('message') + + # if True, warning raises SphinxWarning exception + app.warningiserror = True + with pytest.raises(SphinxWarning): + logger.warning('message: %s', 'arg') + + # message contains format string (refs: #4070) + with pytest.raises(SphinxWarning): + logger.warning('%s') + + +def test_info_location(app, status, warning): + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + logger.info('message1', location='index') + assert 'index.txt: message1' in status.getvalue() + + logger.info('message2', location=('index', 10)) + assert 'index.txt:10: message2' in status.getvalue() + + logger.info('message3', location=None) + assert '\nmessage3' in status.getvalue() + + node = nodes.Node() + node.source, node.line = ('index.txt', 10) + logger.info('message4', location=node) + assert 'index.txt:10: message4' in status.getvalue() + + node.source, node.line = ('index.txt', None) + logger.info('message5', location=node) + assert 'index.txt:: message5' in status.getvalue() + + node.source, node.line = (None, 10) + logger.info('message6', location=node) + assert '<unknown>:10: message6' in status.getvalue() + + node.source, node.line = (None, None) + logger.info('message7', location=node) + assert '\nmessage7' in status.getvalue() + + +def test_warning_location(app, status, warning): + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + logger.warning('message1', location='index') + assert 'index.txt: WARNING: message1' in warning.getvalue() + + logger.warning('message2', location=('index', 10)) + assert 'index.txt:10: WARNING: message2' in warning.getvalue() + + logger.warning('message3', location=None) + assert colorize('red', 'WARNING: message3') in warning.getvalue() + + node = nodes.Node() + node.source, node.line = ('index.txt', 10) + logger.warning('message4', location=node) + assert 'index.txt:10: WARNING: message4' in warning.getvalue() + + node.source, node.line = ('index.txt', None) + logger.warning('message5', location=node) + assert 'index.txt:: WARNING: message5' in warning.getvalue() + + node.source, node.line = (None, 10) + logger.warning('message6', location=node) + 
assert '<unknown>:10: WARNING: message6' in warning.getvalue() + + node.source, node.line = (None, None) + logger.warning('message7', location=node) + assert colorize('red', 'WARNING: message7') in warning.getvalue() + + +def test_suppress_logging(app, status, warning): + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + logger.warning('message1') + with logging.suppress_logging(): + logger.warning('message2') + assert 'WARNING: message1' in warning.getvalue() + assert 'WARNING: message2' not in warning.getvalue() + + assert 'WARNING: message1' in warning.getvalue() + assert 'WARNING: message2' not in warning.getvalue() + + +def test_pending_warnings(app, status, warning): + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + logger.warning('message1') + with logging.pending_warnings(): + # not logged yet (buffered) in here + logger.warning('message2') + logger.warning('message3') + assert 'WARNING: message1' in warning.getvalue() + assert 'WARNING: message2' not in warning.getvalue() + assert 'WARNING: message3' not in warning.getvalue() + + # actually logged as ordered + assert 'WARNING: message2\nWARNING: message3' in strip_escseq(warning.getvalue()) + + +def test_colored_logs(app, status, warning): + app.verbosity = 2 + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + # default colors + logger.debug('message1') + logger.verbose('message2') + logger.info('message3') + logger.warning('message4') + logger.critical('message5') + logger.error('message6') + + assert colorize('darkgray', 'message1') in status.getvalue() + assert 'message2\n' in status.getvalue() # not colored + assert 'message3\n' in status.getvalue() # not colored + assert colorize('red', 'WARNING: message4') in warning.getvalue() + assert 'CRITICAL: message5\n' in warning.getvalue() # not colored + assert colorize('darkred', 'ERROR: message6') in warning.getvalue() + + # color specification + logger.debug('message7', color='white') + logger.info('message8', color='red') + assert colorize('white', 'message7') in status.getvalue() + assert colorize('red', 'message8') in status.getvalue() + + +@pytest.mark.xfail(os.name != 'posix', + reason="Parallel mode does not work on Windows") +def test_logging_in_ParallelTasks(app, status, warning): + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + def child_process(): + logger.info('message1') + logger.warning('message2', location='index') + + tasks = ParallelTasks(1) + tasks.add_task(child_process) + tasks.join() + assert 'message1' in status.getvalue() + assert 'index.txt: WARNING: message2' in warning.getvalue() + + +def test_output_with_unencodable_char(app, status, warning): + class StreamWriter(codecs.StreamWriter): + def write(self, object): + self.stream.write(object.encode('cp1252').decode('cp1252')) + + logging.setup(app, StreamWriter(status), warning) + logger = logging.getLogger(__name__) + + # info with UnicodeEncodeError + status.truncate(0) + status.seek(0) + logger.info("unicode \u206d...") + assert status.getvalue() == "unicode ?...\n" + + +def test_skip_warningiserror(app, status, warning): + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + app.warningiserror = True + with logging.skip_warningiserror(): + logger.warning('message') + + # if False, warning raises SphinxWarning exception + with logging.skip_warningiserror(False): # NoQA: SIM117 + with pytest.raises(SphinxWarning): + logger.warning('message') + + # It also works 
during pending_warnings. + with logging.pending_warnings(): # NoQA: SIM117 + with logging.skip_warningiserror(): + logger.warning('message') + + with pytest.raises(SphinxWarning): # NoQA: PT012,SIM117 + with logging.pending_warnings(): + with logging.skip_warningiserror(False): + logger.warning('message') + + +def test_prefixed_warnings(app, status, warning): + logging.setup(app, status, warning) + logger = logging.getLogger(__name__) + + logger.warning('message1') + with prefixed_warnings('PREFIX:'): + logger.warning('message2') + with prefixed_warnings('Another PREFIX:'): + logger.warning('message3') + logger.warning('message4') + logger.warning('message5') + + assert 'WARNING: message1' in warning.getvalue() + assert 'WARNING: PREFIX: message2' in warning.getvalue() + assert 'WARNING: Another PREFIX: message3' in warning.getvalue() + assert 'WARNING: PREFIX: message4' in warning.getvalue() + assert 'WARNING: message5' in warning.getvalue() + + +def test_get_node_location_abspath(): + # Ensure that node locations are reported as an absolute path, + # even if the source attribute is a relative path. + + relative_filename = os.path.join('relative', 'path.txt') + absolute_filename = osutil.abspath(relative_filename) + + n = nodes.Node() + n.source = relative_filename + + location = logging.get_node_location(n) + + assert location == absolute_filename + ':' diff --git a/tests/test_util_matching.py b/tests/test_util_matching.py new file mode 100644 index 0000000..7d865ba --- /dev/null +++ b/tests/test_util_matching.py @@ -0,0 +1,174 @@ +"""Tests sphinx.util.matching functions.""" +from sphinx.util.matching import Matcher, compile_matchers, get_matching_files + + +def test_compile_matchers(): + # exact matching + pat = compile_matchers(['hello.py']).pop() + assert pat('hello.py') + assert not pat('hello-py') + assert not pat('subdir/hello.py') + + # wild card (*) + pat = compile_matchers(['hello.*']).pop() + assert pat('hello.py') + assert pat('hello.rst') + + pat = compile_matchers(['*.py']).pop() + assert pat('hello.py') + assert pat('world.py') + assert not pat('subdir/hello.py') + + # wild card (**) + pat = compile_matchers(['hello.**']).pop() + assert pat('hello.py') + assert pat('hello.rst') + assert pat('hello.py/world.py') + + pat = compile_matchers(['**.py']).pop() + assert pat('hello.py') + assert pat('world.py') + assert pat('subdir/hello.py') + + pat = compile_matchers(['**/hello.py']).pop() + assert not pat('hello.py') + assert pat('subdir/hello.py') + assert pat('subdir/subdir/hello.py') + + # wild card (?) 
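+    # "?" matches exactly one character, so "hello.?" matches "hello.c" but not "hello.py"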
+ pat = compile_matchers(['hello.?']).pop() + assert pat('hello.c') + assert not pat('hello.py') + + # pattern ([...]) + pat = compile_matchers(['hello[12\\].py']).pop() + assert pat('hello1.py') + assert pat('hello2.py') + assert pat('hello\\.py') + assert not pat('hello3.py') + + pat = compile_matchers(['hello[^12].py']).pop() # "^" is not negative identifier + assert pat('hello1.py') + assert pat('hello2.py') + assert pat('hello^.py') + assert not pat('hello3.py') + + # negative pattern ([!...]) + pat = compile_matchers(['hello[!12].py']).pop() + assert not pat('hello1.py') + assert not pat('hello2.py') + assert not pat('hello/.py') # negative pattern does not match to "/" + assert pat('hello3.py') + + # non patterns + pat = compile_matchers(['hello[.py']).pop() + assert pat('hello[.py') + assert not pat('hello.py') + + pat = compile_matchers(['hello[].py']).pop() + assert pat('hello[].py') + assert not pat('hello.py') + + pat = compile_matchers(['hello[!].py']).pop() + assert pat('hello[!].py') + assert not pat('hello.py') + + +def test_Matcher(): + matcher = Matcher(['hello.py', '**/world.py']) + assert matcher('hello.py') + assert not matcher('subdir/hello.py') + assert matcher('world.py') + assert matcher('subdir/world.py') + + +def test_get_matching_files_all(rootdir): + files = get_matching_files(rootdir / "test-root") + assert sorted(files) == [ + 'Makefile', '_templates/contentssb.html', '_templates/customsb.html', + '_templates/layout.html', 'autodoc.txt', 'autodoc_target.py', 'bom.txt', 'conf.py', + 'extapi.txt', 'extensions.txt', 'file_with_special_#_chars.xyz', 'footnote.txt', + 'images.txt', 'img.foo.png', 'img.gif', 'img.pdf', 'img.png', 'includes.txt', + 'index.txt', 'lists.txt', 'literal.inc', 'literal_orig.inc', 'markup.txt', 'math.txt', + 'objects.txt', 'otherext.foo', 'parsermod.py', 'quotes.inc', 'rimg.png', + 'special/api.h', 'special/code.py', 'subdir/excluded.txt', 'subdir/images.txt', + 'subdir/img.png', 'subdir/include.inc', 'subdir/includes.txt', 'subdir/simg.png', + 'svgimg.pdf', 'svgimg.svg', 'tabs.inc', 'test.inc', 'wrongenc.inc', + ] + + +def test_get_matching_files_all_exclude_single(rootdir): + files = get_matching_files(rootdir / "test-root", exclude_patterns=["**.html"]) + assert sorted(files) == [ + 'Makefile', 'autodoc.txt', 'autodoc_target.py', 'bom.txt', 'conf.py', + 'extapi.txt', 'extensions.txt', 'file_with_special_#_chars.xyz', 'footnote.txt', + 'images.txt', 'img.foo.png', 'img.gif', 'img.pdf', 'img.png', 'includes.txt', + 'index.txt', 'lists.txt', 'literal.inc', 'literal_orig.inc', 'markup.txt', 'math.txt', + 'objects.txt', 'otherext.foo', 'parsermod.py', 'quotes.inc', 'rimg.png', + 'special/api.h', 'special/code.py', 'subdir/excluded.txt', 'subdir/images.txt', + 'subdir/img.png', 'subdir/include.inc', 'subdir/includes.txt', 'subdir/simg.png', + 'svgimg.pdf', 'svgimg.svg', 'tabs.inc', 'test.inc', 'wrongenc.inc', + ] + + +def test_get_matching_files_all_exclude_multiple(rootdir): + files = get_matching_files(rootdir / "test-root", exclude_patterns=["**.html", "**.inc"]) + assert sorted(files) == [ + 'Makefile', 'autodoc.txt', 'autodoc_target.py', 'bom.txt', 'conf.py', + 'extapi.txt', 'extensions.txt', 'file_with_special_#_chars.xyz', 'footnote.txt', + 'images.txt', 'img.foo.png', 'img.gif', 'img.pdf', 'img.png', 'includes.txt', + 'index.txt', 'lists.txt', 'markup.txt', 'math.txt', 'objects.txt', 'otherext.foo', + 'parsermod.py', 'rimg.png', 'special/api.h', 'special/code.py', 'subdir/excluded.txt', + 'subdir/images.txt', 'subdir/img.png', 
'subdir/includes.txt', 'subdir/simg.png', + 'svgimg.pdf', 'svgimg.svg', + ] + + +def test_get_matching_files_all_exclude_nonexistent(rootdir): + files = get_matching_files(rootdir / "test-root", exclude_patterns=["halibut/**"]) + assert sorted(files) == [ + 'Makefile', '_templates/contentssb.html', '_templates/customsb.html', + '_templates/layout.html', 'autodoc.txt', 'autodoc_target.py', 'bom.txt', 'conf.py', + 'extapi.txt', 'extensions.txt', 'file_with_special_#_chars.xyz', 'footnote.txt', + 'images.txt', 'img.foo.png', 'img.gif', 'img.pdf', 'img.png', 'includes.txt', + 'index.txt', 'lists.txt', 'literal.inc', 'literal_orig.inc', 'markup.txt', 'math.txt', + 'objects.txt', 'otherext.foo', 'parsermod.py', 'quotes.inc', 'rimg.png', + 'special/api.h', 'special/code.py', 'subdir/excluded.txt', 'subdir/images.txt', + 'subdir/img.png', 'subdir/include.inc', 'subdir/includes.txt', 'subdir/simg.png', + 'svgimg.pdf', 'svgimg.svg', 'tabs.inc', 'test.inc', 'wrongenc.inc', + ] + + +def test_get_matching_files_all_include_single(rootdir): + files = get_matching_files(rootdir / "test-root", include_patterns=["subdir/**"]) + assert sorted(files) == [ + 'subdir/excluded.txt', 'subdir/images.txt', 'subdir/img.png', 'subdir/include.inc', + 'subdir/includes.txt', 'subdir/simg.png', + ] + + +def test_get_matching_files_all_include_multiple(rootdir): + files = get_matching_files(rootdir / "test-root", include_patterns=["special/**", "subdir/**"]) + assert sorted(files) == [ + 'special/api.h', 'special/code.py', 'subdir/excluded.txt', 'subdir/images.txt', + 'subdir/img.png', 'subdir/include.inc', 'subdir/includes.txt', 'subdir/simg.png', + ] + + +def test_get_matching_files_all_include_nonexistent(rootdir): + files = get_matching_files(rootdir / "test-root", include_patterns=["halibut/**"]) + assert sorted(files) == [] + + +def test_get_matching_files_all_include_prefix(rootdir): + files = get_matching_files(rootdir / "test-root", include_patterns=["autodoc*"]) + assert sorted(files) == [ + 'autodoc.txt', 'autodoc_target.py', + ] + + +def test_get_matching_files_all_include_question_mark(rootdir): + files = get_matching_files(rootdir / "test-root", include_patterns=["img.???"]) + assert sorted(files) == [ + 'img.gif', 'img.pdf', 'img.png', + ] diff --git a/tests/test_util_nodes.py b/tests/test_util_nodes.py new file mode 100644 index 0000000..92e4dc1 --- /dev/null +++ b/tests/test_util_nodes.py @@ -0,0 +1,254 @@ +"""Tests uti.nodes functions.""" +from __future__ import annotations + +import warnings +from textwrap import dedent +from typing import Any + +import pytest +from docutils import frontend, nodes +from docutils.parsers import rst +from docutils.utils import new_document + +from sphinx.transforms import ApplySourceWorkaround +from sphinx.util.nodes import ( + NodeMatcher, + apply_source_workaround, + clean_astext, + extract_messages, + make_id, + split_explicit_title, +) + + +def _transform(doctree): + ApplySourceWorkaround(doctree).apply() + + +def create_new_document(): + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', category=DeprecationWarning) + # DeprecationWarning: The frontend.OptionParser class will be replaced + # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later. 
+ settings = frontend.OptionParser( + components=(rst.Parser,)).get_default_values() + settings.id_prefix = 'id' + document = new_document('dummy.txt', settings) + return document + + +def _get_doctree(text): + document = create_new_document() + rst.Parser().parse(text, document) + _transform(document) + return document + + +def assert_node_count(messages, node_type, expect_count): + count = 0 + node_list = [node for node, msg in messages] + for node in node_list: + if isinstance(node, node_type): + count += 1 + + assert count == expect_count, ( + "Count of %r in the %r is %d instead of %d" + % (node_type, node_list, count, expect_count)) + + +def test_NodeMatcher(): + doctree = nodes.document(None, None) + doctree += nodes.paragraph('', 'Hello') + doctree += nodes.paragraph('', 'Sphinx', block=1) + doctree += nodes.paragraph('', 'World', block=2) + doctree += nodes.literal_block('', 'blah blah blah', block=3) + + # search by node class + matcher = NodeMatcher(nodes.paragraph) + assert len(list(doctree.findall(matcher))) == 3 + + # search by multiple node classes + matcher = NodeMatcher(nodes.paragraph, nodes.literal_block) + assert len(list(doctree.findall(matcher))) == 4 + + # search by node attribute + matcher = NodeMatcher(block=1) + assert len(list(doctree.findall(matcher))) == 1 + + # search by node attribute (Any) + matcher = NodeMatcher(block=Any) + assert len(list(doctree.findall(matcher))) == 3 + + # search by both class and attribute + matcher = NodeMatcher(nodes.paragraph, block=Any) + assert len(list(doctree.findall(matcher))) == 2 + + # mismatched + matcher = NodeMatcher(nodes.title) + assert len(list(doctree.findall(matcher))) == 0 + + # search with Any does not match to Text node + matcher = NodeMatcher(blah=Any) + assert len(list(doctree.findall(matcher))) == 0 + + +@pytest.mark.parametrize( + ('rst', 'node_cls', 'count'), + [ + ( + """ + .. admonition:: admonition title + + admonition body + """, + nodes.title, 1, + ), + ( + """ + .. figure:: foo.jpg + + this is title + """, + nodes.caption, 1, + ), + ( + """ + .. rubric:: spam + """, + nodes.rubric, 1, + ), + ( + """ + | spam + | egg + """, + nodes.line, 2, + ), + ( + """ + section + ======= + + +----------------+ + | | **Title 1** | + | | Message 1 | + +----------------+ + """, + nodes.line, 2, + ), + ( + """ + * | **Title 1** + | Message 1 + """, + nodes.line, 2, + + ), + ], +) +def test_extract_messages(rst, node_cls, count): + msg = extract_messages(_get_doctree(dedent(rst))) + assert_node_count(msg, node_cls, count) + + +def test_extract_messages_without_rawsource(): + """ + Check node.rawsource is fall-backed by using node.astext() value. + + `extract_message` which is used from Sphinx i18n feature drop ``not node.rawsource`` + nodes. So, all nodes which want to translate must have ``rawsource`` value. + However, sometimes node.rawsource is not set. + + For example: recommonmark-0.2.0 doesn't set rawsource to `paragraph` node. + + refs #1994: Fall back to node's astext() during i18n message extraction. 
+ """ + p = nodes.paragraph() + p.append(nodes.Text('test')) + p.append(nodes.Text('sentence')) + assert not p.rawsource # target node must not have rawsource value + document = create_new_document() + document.append(p) + _transform(document) + assert_node_count(extract_messages(document), nodes.TextElement, 1) + assert [m for n, m in extract_messages(document)][0], 'text sentence' + + +def test_clean_astext(): + node = nodes.paragraph(text='hello world') + assert clean_astext(node) == 'hello world' + + node = nodes.image(alt='hello world') + assert clean_astext(node) == '' + + node = nodes.paragraph(text='hello world') + node += nodes.raw('', 'raw text', format='html') + assert clean_astext(node) == 'hello world' + + +@pytest.mark.parametrize( + ('prefix', 'term', 'expected'), + [ + ('', '', 'id0'), + ('term', '', 'term-0'), + ('term', 'Sphinx', 'term-Sphinx'), + ('', 'io.StringIO', 'io.StringIO'), # contains a dot + ('', 'sphinx.setup_command', 'sphinx.setup_command'), # contains a dot & underscore + ('', '_io.StringIO', 'io.StringIO'), # starts with underscore + ('', 'sphinx', 'sphinx'), # alphabets in unicode fullwidth characters + ('', '悠好', 'id0'), # multibytes text (in Chinese) + ('', 'Hello=悠好=こんにちは', 'Hello'), # alphabets and multibytes text + ('', 'fünf', 'funf'), # latin1 (umlaut) + ('', '0sphinx', 'sphinx'), # starts with number + ('', 'sphinx-', 'sphinx'), # ends with hyphen + ]) +def test_make_id(app, prefix, term, expected): + document = create_new_document() + assert make_id(app.env, document, prefix, term) == expected + + +def test_make_id_already_registered(app): + document = create_new_document() + document.ids['term-Sphinx'] = True # register "term-Sphinx" manually + assert make_id(app.env, document, 'term', 'Sphinx') == 'term-0' + + +def test_make_id_sequential(app): + document = create_new_document() + document.ids['term-0'] = True + assert make_id(app.env, document, 'term') == 'term-1' + + +@pytest.mark.parametrize( + ('title', 'expected'), + [ + # implicit + ('hello', (False, 'hello', 'hello')), + # explicit + ('hello <world>', (True, 'hello', 'world')), + # explicit (title having angle brackets) + ('hello <world> <sphinx>', (True, 'hello <world>', 'sphinx')), + ], +) +def test_split_explicit_target(title, expected): + assert expected == split_explicit_title(title) + + +def test_apply_source_workaround_literal_block_no_source(): + """Regression test for #11091. + + Test that apply_source_workaround doesn't raise. + """ + literal_block = nodes.literal_block('', '') + list_item = nodes.list_item('', literal_block) + bullet_list = nodes.bullet_list('', list_item) + + assert literal_block.source is None + assert list_item.source is None + assert bullet_list.source is None + + apply_source_workaround(literal_block) + + assert literal_block.source is None + assert list_item.source is None + assert bullet_list.source is None diff --git a/tests/test_util_rst.py b/tests/test_util_rst.py new file mode 100644 index 0000000..d50c90c --- /dev/null +++ b/tests/test_util_rst.py @@ -0,0 +1,164 @@ +"""Tests sphinx.util.rst functions.""" + +from docutils.statemachine import StringList +from jinja2 import Environment + +from sphinx.util.rst import append_epilog, escape, heading, prepend_prolog, textwidth + + +def test_escape(): + assert escape(':ref:`id`') == r'\:ref\:\`id\`' + assert escape('footnote [#]_') == r'footnote \[\#\]\_' + assert escape('sphinx.application') == r'sphinx.application' + assert escape('.. toctree::') == r'\.. 
toctree\:\:' + + +def test_append_epilog(app): + epilog = 'this is rst_epilog\ngood-bye reST!' + content = StringList(['hello Sphinx world', + 'Sphinx is a document generator'], + 'dummy.rst') + append_epilog(content, epilog) + + assert list(content.xitems()) == [('dummy.rst', 0, 'hello Sphinx world'), + ('dummy.rst', 1, 'Sphinx is a document generator'), + ('dummy.rst', 2, ''), + ('<rst_epilog>', 0, 'this is rst_epilog'), + ('<rst_epilog>', 1, 'good-bye reST!')] + + +def test_prepend_prolog(app): + prolog = 'this is rst_prolog\nhello reST!' + content = StringList([':title: test of SphinxFileInput', + ':author: Sphinx team', + '', + 'hello Sphinx world', + 'Sphinx is a document generator'], + 'dummy.rst') + prepend_prolog(content, prolog) + + assert list(content.xitems()) == [('dummy.rst', 0, ':title: test of SphinxFileInput'), + ('dummy.rst', 1, ':author: Sphinx team'), + ('<generated>', 0, ''), + ('<rst_prolog>', 0, 'this is rst_prolog'), + ('<rst_prolog>', 1, 'hello reST!'), + ('<generated>', 0, ''), + ('dummy.rst', 2, ''), + ('dummy.rst', 3, 'hello Sphinx world'), + ('dummy.rst', 4, 'Sphinx is a document generator')] + + +def test_prepend_prolog_with_CR(app): + # prolog having CR at tail + prolog = 'this is rst_prolog\nhello reST!\n' + content = StringList(['hello Sphinx world', + 'Sphinx is a document generator'], + 'dummy.rst') + prepend_prolog(content, prolog) + + assert list(content.xitems()) == [('<rst_prolog>', 0, 'this is rst_prolog'), + ('<rst_prolog>', 1, 'hello reST!'), + ('<generated>', 0, ''), + ('dummy.rst', 0, 'hello Sphinx world'), + ('dummy.rst', 1, 'Sphinx is a document generator')] + + +def test_prepend_prolog_without_CR(app): + # prolog not having CR at tail + prolog = 'this is rst_prolog\nhello reST!' + content = StringList(['hello Sphinx world', + 'Sphinx is a document generator'], + 'dummy.rst') + prepend_prolog(content, prolog) + + assert list(content.xitems()) == [('<rst_prolog>', 0, 'this is rst_prolog'), + ('<rst_prolog>', 1, 'hello reST!'), + ('<generated>', 0, ''), + ('dummy.rst', 0, 'hello Sphinx world'), + ('dummy.rst', 1, 'Sphinx is a document generator')] + + +def test_prepend_prolog_with_roles_in_sections(app): + prolog = 'this is rst_prolog\nhello reST!' + content = StringList([':title: test of SphinxFileInput', + ':author: Sphinx team', + '', # this newline is required + ':mod:`foo`', + '----------', + '', + 'hello'], + 'dummy.rst') + prepend_prolog(content, prolog) + + assert list(content.xitems()) == [('dummy.rst', 0, ':title: test of SphinxFileInput'), + ('dummy.rst', 1, ':author: Sphinx team'), + ('<generated>', 0, ''), + ('<rst_prolog>', 0, 'this is rst_prolog'), + ('<rst_prolog>', 1, 'hello reST!'), + ('<generated>', 0, ''), + ('dummy.rst', 2, ''), + ('dummy.rst', 3, ':mod:`foo`'), + ('dummy.rst', 4, '----------'), + ('dummy.rst', 5, ''), + ('dummy.rst', 6, 'hello')] + + +def test_prepend_prolog_with_roles_in_sections_with_newline(app): + # prologue with trailing line break + prolog = 'this is rst_prolog\nhello reST!\n' + content = StringList([':mod:`foo`', '-' * 10, '', 'hello'], 'dummy.rst') + prepend_prolog(content, prolog) + + assert list(content.xitems()) == [('<rst_prolog>', 0, 'this is rst_prolog'), + ('<rst_prolog>', 1, 'hello reST!'), + ('<generated>', 0, ''), + ('dummy.rst', 0, ':mod:`foo`'), + ('dummy.rst', 1, '----------'), + ('dummy.rst', 2, ''), + ('dummy.rst', 3, 'hello')] + + +def test_prepend_prolog_with_roles_in_sections_without_newline(app): + # prologue with no trailing line break + prolog = 'this is rst_prolog\nhello reST!' 
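+    # even without a trailing newline, the prolog lines are inserted before the section title, followed by a blank <generated> line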
+ content = StringList([':mod:`foo`', '-' * 10, '', 'hello'], 'dummy.rst') + prepend_prolog(content, prolog) + + assert list(content.xitems()) == [('<rst_prolog>', 0, 'this is rst_prolog'), + ('<rst_prolog>', 1, 'hello reST!'), + ('<generated>', 0, ''), + ('dummy.rst', 0, ':mod:`foo`'), + ('dummy.rst', 1, '----------'), + ('dummy.rst', 2, ''), + ('dummy.rst', 3, 'hello')] + + +def test_textwidth(): + assert textwidth('Hello') == 5 + assert textwidth('русский язык') == 12 + assert textwidth('русский язык', 'WFA') == 23 # Cyrillic are ambiguous chars + + +def test_heading(): + env = Environment() + env.extend(language=None) + + assert heading(env, 'Hello') == ('Hello\n' + '=====') + assert heading(env, 'Hello', 1) == ('Hello\n' + '=====') + assert heading(env, 'Hello', 2) == ('Hello\n' + '-----') + assert heading(env, 'Hello', 3) == ('Hello\n' + '~~~~~') + assert heading(env, 'русский язык', 1) == ( + 'русский язык\n' + '============' + ) + + # language=ja: ambiguous + env.language = 'ja' + assert heading(env, 'русский язык', 1) == ( + 'русский язык\n' + '=======================' + ) diff --git a/tests/test_util_template.py b/tests/test_util_template.py new file mode 100644 index 0000000..4601179 --- /dev/null +++ b/tests/test_util_template.py @@ -0,0 +1,29 @@ +"""Tests sphinx.util.template functions.""" + +from sphinx.util.template import ReSTRenderer + + +def test_ReSTRenderer_escape(): + r = ReSTRenderer() + template = '{{ "*hello*" | e }}' + assert r.render_string(template, {}) == r'\*hello\*' + + +def test_ReSTRenderer_heading(): + r = ReSTRenderer() + + template = '{{ "hello" | heading }}' + assert r.render_string(template, {}) == 'hello\n=====' + + template = '{{ "hello" | heading(1) }}' + assert r.render_string(template, {}) == 'hello\n=====' + + template = '{{ "русский язык" | heading(2) }}' + assert r.render_string(template, {}) == ('русский язык\n' + '------------') + + # language: ja + r.env.language = 'ja' + template = '{{ "русский язык" | heading }}' + assert r.render_string(template, {}) == ('русский язык\n' + '=======================') diff --git a/tests/test_util_typing.py b/tests/test_util_typing.py new file mode 100644 index 0000000..d79852e --- /dev/null +++ b/tests/test_util_typing.py @@ -0,0 +1,536 @@ +"""Tests util.typing functions.""" + +import sys +from enum import Enum +from numbers import Integral +from struct import Struct +from types import TracebackType +from typing import ( + Any, + Callable, + Dict, + Generator, + List, + NewType, + Optional, + Tuple, + TypeVar, + Union, +) + +import pytest + +from sphinx.ext.autodoc import mock +from sphinx.util.typing import INVALID_BUILTIN_CLASSES, restify, stringify_annotation + + +class MyClass1: + pass + + +class MyClass2(MyClass1): + __qualname__ = '<MyClass2>' + + +class MyEnum(Enum): + a = 1 + + +T = TypeVar('T') +MyInt = NewType('MyInt', int) + + +class MyList(List[T]): + pass + + +class BrokenType: + __args__ = int + + +def test_restify(): + assert restify(int) == ":py:class:`int`" + assert restify(int, "smart") == ":py:class:`int`" + + assert restify(str) == ":py:class:`str`" + assert restify(str, "smart") == ":py:class:`str`" + + assert restify(None) == ":py:obj:`None`" + assert restify(None, "smart") == ":py:obj:`None`" + + assert restify(Integral) == ":py:class:`numbers.Integral`" + assert restify(Integral, "smart") == ":py:class:`~numbers.Integral`" + + assert restify(Struct) == ":py:class:`struct.Struct`" + assert restify(Struct, "smart") == ":py:class:`~struct.Struct`" + + assert restify(TracebackType) == 
":py:class:`types.TracebackType`" + assert restify(TracebackType, "smart") == ":py:class:`~types.TracebackType`" + + assert restify(Any) == ":py:obj:`~typing.Any`" + assert restify(Any, "smart") == ":py:obj:`~typing.Any`" + + assert restify('str') == "str" + assert restify('str', "smart") == "str" + + +def test_is_invalid_builtin_class(): + # if these tests start failing, it means that the __module__ + # of one of these classes has changed, and INVALID_BUILTIN_CLASSES + # in sphinx.util.typing needs to be updated. + assert INVALID_BUILTIN_CLASSES.keys() == {Struct, TracebackType} + assert Struct.__module__ == '_struct' + assert TracebackType.__module__ == 'builtins' + + +def test_restify_type_hints_containers(): + assert restify(List) == ":py:class:`~typing.List`" + assert restify(Dict) == ":py:class:`~typing.Dict`" + assert restify(List[int]) == ":py:class:`~typing.List`\\ [:py:class:`int`]" + assert restify(List[str]) == ":py:class:`~typing.List`\\ [:py:class:`str`]" + assert restify(Dict[str, float]) == (":py:class:`~typing.Dict`\\ " + "[:py:class:`str`, :py:class:`float`]") + assert restify(Tuple[str, str, str]) == (":py:class:`~typing.Tuple`\\ " + "[:py:class:`str`, :py:class:`str`, " + ":py:class:`str`]") + assert restify(Tuple[str, ...]) == ":py:class:`~typing.Tuple`\\ [:py:class:`str`, ...]" + + if sys.version_info[:2] <= (3, 10): + assert restify(Tuple[()]) == ":py:class:`~typing.Tuple`\\ [()]" + else: + assert restify(Tuple[()]) == ":py:class:`~typing.Tuple`" + + assert restify(List[Dict[str, Tuple]]) == (":py:class:`~typing.List`\\ " + "[:py:class:`~typing.Dict`\\ " + "[:py:class:`str`, :py:class:`~typing.Tuple`]]") + assert restify(MyList[Tuple[int, int]]) == (":py:class:`tests.test_util_typing.MyList`\\ " + "[:py:class:`~typing.Tuple`\\ " + "[:py:class:`int`, :py:class:`int`]]") + assert restify(Generator[None, None, None]) == (":py:class:`~typing.Generator`\\ " + "[:py:obj:`None`, :py:obj:`None`, " + ":py:obj:`None`]") + + +def test_restify_type_hints_Callable(): + assert restify(Callable) == ":py:class:`~typing.Callable`" + + assert restify(Callable[[str], int]) == (":py:class:`~typing.Callable`\\ " + "[[:py:class:`str`], :py:class:`int`]") + assert restify(Callable[..., int]) == (":py:class:`~typing.Callable`\\ " + "[[...], :py:class:`int`]") + + +def test_restify_type_hints_Union(): + assert restify(Optional[int]) == ":py:obj:`~typing.Optional`\\ [:py:class:`int`]" + assert restify(Union[str, None]) == ":py:obj:`~typing.Optional`\\ [:py:class:`str`]" + assert restify(Union[int, str]) == (":py:obj:`~typing.Union`\\ " + "[:py:class:`int`, :py:class:`str`]") + assert restify(Union[int, Integral]) == (":py:obj:`~typing.Union`\\ " + "[:py:class:`int`, :py:class:`numbers.Integral`]") + assert restify(Union[int, Integral], "smart") == (":py:obj:`~typing.Union`\\ " + "[:py:class:`int`," + " :py:class:`~numbers.Integral`]") + + assert (restify(Union[MyClass1, MyClass2]) == + (":py:obj:`~typing.Union`\\ " + "[:py:class:`tests.test_util_typing.MyClass1`, " + ":py:class:`tests.test_util_typing.<MyClass2>`]")) + assert (restify(Union[MyClass1, MyClass2], "smart") == + (":py:obj:`~typing.Union`\\ " + "[:py:class:`~tests.test_util_typing.MyClass1`," + " :py:class:`~tests.test_util_typing.<MyClass2>`]")) + + +def test_restify_type_hints_typevars(): + T = TypeVar('T') + T_co = TypeVar('T_co', covariant=True) + T_contra = TypeVar('T_contra', contravariant=True) + + assert restify(T) == ":py:obj:`tests.test_util_typing.T`" + assert restify(T, "smart") == ":py:obj:`~tests.test_util_typing.T`" 
+ + assert restify(T_co) == ":py:obj:`tests.test_util_typing.T_co`" + assert restify(T_co, "smart") == ":py:obj:`~tests.test_util_typing.T_co`" + + assert restify(T_contra) == ":py:obj:`tests.test_util_typing.T_contra`" + assert restify(T_contra, "smart") == ":py:obj:`~tests.test_util_typing.T_contra`" + + assert restify(List[T]) == ":py:class:`~typing.List`\\ [:py:obj:`tests.test_util_typing.T`]" + assert restify(List[T], "smart") == ":py:class:`~typing.List`\\ [:py:obj:`~tests.test_util_typing.T`]" + + assert restify(list[T]) == ":py:class:`list`\\ [:py:obj:`tests.test_util_typing.T`]" + assert restify(list[T], "smart") == ":py:class:`list`\\ [:py:obj:`~tests.test_util_typing.T`]" + + if sys.version_info[:2] >= (3, 10): + assert restify(MyInt) == ":py:class:`tests.test_util_typing.MyInt`" + assert restify(MyInt, "smart") == ":py:class:`~tests.test_util_typing.MyInt`" + else: + assert restify(MyInt) == ":py:class:`MyInt`" + assert restify(MyInt, "smart") == ":py:class:`MyInt`" + + +def test_restify_type_hints_custom_class(): + assert restify(MyClass1) == ":py:class:`tests.test_util_typing.MyClass1`" + assert restify(MyClass1, "smart") == ":py:class:`~tests.test_util_typing.MyClass1`" + + assert restify(MyClass2) == ":py:class:`tests.test_util_typing.<MyClass2>`" + assert restify(MyClass2, "smart") == ":py:class:`~tests.test_util_typing.<MyClass2>`" + + +def test_restify_type_hints_alias(): + MyStr = str + MyTypingTuple = Tuple[str, str] + MyTuple = tuple[str, str] + assert restify(MyStr) == ":py:class:`str`" + assert restify(MyTypingTuple) == ":py:class:`~typing.Tuple`\\ [:py:class:`str`, :py:class:`str`]" + assert restify(MyTuple) == ":py:class:`tuple`\\ [:py:class:`str`, :py:class:`str`]" + + +def test_restify_type_ForwardRef(): + from typing import ForwardRef # type: ignore[attr-defined] + assert restify(ForwardRef("MyInt")) == ":py:class:`MyInt`" + + assert restify(list[ForwardRef("MyInt")]) == ":py:class:`list`\\ [:py:class:`MyInt`]" + + assert restify(Tuple[dict[ForwardRef("MyInt"), str], list[List[int]]]) == ":py:class:`~typing.Tuple`\\ [:py:class:`dict`\\ [:py:class:`MyInt`, :py:class:`str`], :py:class:`list`\\ [:py:class:`~typing.List`\\ [:py:class:`int`]]]" # type: ignore[attr-defined] + + +def test_restify_type_Literal(): + from typing import Literal # type: ignore[attr-defined] + assert restify(Literal[1, "2", "\r"]) == ":py:obj:`~typing.Literal`\\ [1, '2', '\\r']" + + assert restify(Literal[MyEnum.a], 'fully-qualified-except-typing') == ':py:obj:`~typing.Literal`\\ [:py:attr:`tests.test_util_typing.MyEnum.a`]' + assert restify(Literal[MyEnum.a], 'smart') == ':py:obj:`~typing.Literal`\\ [:py:attr:`~tests.test_util_typing.MyEnum.a`]' + + +def test_restify_pep_585(): + assert restify(list[str]) == ":py:class:`list`\\ [:py:class:`str`]" # type: ignore[attr-defined] + assert restify(dict[str, str]) == (":py:class:`dict`\\ " # type: ignore[attr-defined] + "[:py:class:`str`, :py:class:`str`]") + assert restify(tuple[str, ...]) == ":py:class:`tuple`\\ [:py:class:`str`, ...]" + assert restify(tuple[str, str, str]) == (":py:class:`tuple`\\ " + "[:py:class:`str`, :py:class:`str`, " + ":py:class:`str`]") + assert restify(dict[str, tuple[int, ...]]) == (":py:class:`dict`\\ " # type: ignore[attr-defined] + "[:py:class:`str`, :py:class:`tuple`\\ " + "[:py:class:`int`, ...]]") + + assert restify(tuple[()]) == ":py:class:`tuple`\\ [()]" + + # Mix old typing with PEP 585 + assert restify(List[dict[str, Tuple[str, ...]]]) == (":py:class:`~typing.List`\\ " + "[:py:class:`dict`\\ " + 
"[:py:class:`str`, :py:class:`~typing.Tuple`\\ " + "[:py:class:`str`, ...]]]") + assert restify(tuple[MyList[list[int]], int]) == (":py:class:`tuple`\\ [" + ":py:class:`tests.test_util_typing.MyList`\\ " + "[:py:class:`list`\\ [:py:class:`int`]], " + ":py:class:`int`]") + + +@pytest.mark.skipif(sys.version_info[:2] <= (3, 9), reason='python 3.10+ is required.') +def test_restify_type_union_operator(): + assert restify(int | None) == ":py:class:`int` | :py:obj:`None`" # type: ignore[attr-defined] + assert restify(int | str) == ":py:class:`int` | :py:class:`str`" # type: ignore[attr-defined] + assert restify(int | str | None) == (":py:class:`int` | :py:class:`str` | " # type: ignore[attr-defined] + ":py:obj:`None`") + + +def test_restify_broken_type_hints(): + assert restify(BrokenType) == ':py:class:`tests.test_util_typing.BrokenType`' + assert restify(BrokenType, "smart") == ':py:class:`~tests.test_util_typing.BrokenType`' + + +def test_restify_mock(): + with mock(['unknown']): + import unknown + assert restify(unknown) == ':py:class:`unknown`' + assert restify(unknown.secret.Class) == ':py:class:`unknown.secret.Class`' + assert restify(unknown.secret.Class, "smart") == ':py:class:`~unknown.secret.Class`' + + +def test_stringify_annotation(): + assert stringify_annotation(int, 'fully-qualified-except-typing') == "int" + assert stringify_annotation(int, "smart") == "int" + + assert stringify_annotation(str, 'fully-qualified-except-typing') == "str" + assert stringify_annotation(str, "smart") == "str" + + assert stringify_annotation(None, 'fully-qualified-except-typing') == "None" + assert stringify_annotation(None, "smart") == "None" + + assert stringify_annotation(Integral, 'fully-qualified-except-typing') == "numbers.Integral" + assert stringify_annotation(Integral, "smart") == "~numbers.Integral" + + assert stringify_annotation(Struct, 'fully-qualified-except-typing') == "struct.Struct" + assert stringify_annotation(Struct, "smart") == "~struct.Struct" + + assert stringify_annotation(TracebackType, 'fully-qualified-except-typing') == "types.TracebackType" + assert stringify_annotation(TracebackType, "smart") == "~types.TracebackType" + + assert stringify_annotation(Any, 'fully-qualified-except-typing') == "Any" + assert stringify_annotation(Any, "fully-qualified") == "typing.Any" + assert stringify_annotation(Any, "smart") == "~typing.Any" + + +def test_stringify_type_hints_containers(): + assert stringify_annotation(List, 'fully-qualified-except-typing') == "List" + assert stringify_annotation(List, "fully-qualified") == "typing.List" + assert stringify_annotation(List, "smart") == "~typing.List" + + assert stringify_annotation(Dict, 'fully-qualified-except-typing') == "Dict" + assert stringify_annotation(Dict, "fully-qualified") == "typing.Dict" + assert stringify_annotation(Dict, "smart") == "~typing.Dict" + + assert stringify_annotation(List[int], 'fully-qualified-except-typing') == "List[int]" + assert stringify_annotation(List[int], "fully-qualified") == "typing.List[int]" + assert stringify_annotation(List[int], "smart") == "~typing.List[int]" + + assert stringify_annotation(List[str], 'fully-qualified-except-typing') == "List[str]" + assert stringify_annotation(List[str], "fully-qualified") == "typing.List[str]" + assert stringify_annotation(List[str], "smart") == "~typing.List[str]" + + assert stringify_annotation(Dict[str, float], 'fully-qualified-except-typing') == "Dict[str, float]" + assert stringify_annotation(Dict[str, float], "fully-qualified") == "typing.Dict[str, 
float]" + assert stringify_annotation(Dict[str, float], "smart") == "~typing.Dict[str, float]" + + assert stringify_annotation(Tuple[str, str, str], 'fully-qualified-except-typing') == "Tuple[str, str, str]" + assert stringify_annotation(Tuple[str, str, str], "fully-qualified") == "typing.Tuple[str, str, str]" + assert stringify_annotation(Tuple[str, str, str], "smart") == "~typing.Tuple[str, str, str]" + + assert stringify_annotation(Tuple[str, ...], 'fully-qualified-except-typing') == "Tuple[str, ...]" + assert stringify_annotation(Tuple[str, ...], "fully-qualified") == "typing.Tuple[str, ...]" + assert stringify_annotation(Tuple[str, ...], "smart") == "~typing.Tuple[str, ...]" + + if sys.version_info[:2] <= (3, 10): + assert stringify_annotation(Tuple[()], 'fully-qualified-except-typing') == "Tuple[()]" + assert stringify_annotation(Tuple[()], "fully-qualified") == "typing.Tuple[()]" + assert stringify_annotation(Tuple[()], "smart") == "~typing.Tuple[()]" + else: + assert stringify_annotation(Tuple[()], 'fully-qualified-except-typing') == "Tuple" + assert stringify_annotation(Tuple[()], "fully-qualified") == "typing.Tuple" + assert stringify_annotation(Tuple[()], "smart") == "~typing.Tuple" + + assert stringify_annotation(List[Dict[str, Tuple]], 'fully-qualified-except-typing') == "List[Dict[str, Tuple]]" + assert stringify_annotation(List[Dict[str, Tuple]], "fully-qualified") == "typing.List[typing.Dict[str, typing.Tuple]]" + assert stringify_annotation(List[Dict[str, Tuple]], "smart") == "~typing.List[~typing.Dict[str, ~typing.Tuple]]" + + assert stringify_annotation(MyList[Tuple[int, int]], 'fully-qualified-except-typing') == "tests.test_util_typing.MyList[Tuple[int, int]]" + assert stringify_annotation(MyList[Tuple[int, int]], "fully-qualified") == "tests.test_util_typing.MyList[typing.Tuple[int, int]]" + assert stringify_annotation(MyList[Tuple[int, int]], "smart") == "~tests.test_util_typing.MyList[~typing.Tuple[int, int]]" + + assert stringify_annotation(Generator[None, None, None], 'fully-qualified-except-typing') == "Generator[None, None, None]" + assert stringify_annotation(Generator[None, None, None], "fully-qualified") == "typing.Generator[None, None, None]" + assert stringify_annotation(Generator[None, None, None], "smart") == "~typing.Generator[None, None, None]" + + +def test_stringify_type_hints_pep_585(): + assert stringify_annotation(list[int], 'fully-qualified-except-typing') == "list[int]" + assert stringify_annotation(list[int], "smart") == "list[int]" + + assert stringify_annotation(list[str], 'fully-qualified-except-typing') == "list[str]" + assert stringify_annotation(list[str], "smart") == "list[str]" + + assert stringify_annotation(dict[str, float], 'fully-qualified-except-typing') == "dict[str, float]" + assert stringify_annotation(dict[str, float], "smart") == "dict[str, float]" + + assert stringify_annotation(tuple[str, str, str], 'fully-qualified-except-typing') == "tuple[str, str, str]" + assert stringify_annotation(tuple[str, str, str], "smart") == "tuple[str, str, str]" + + assert stringify_annotation(tuple[str, ...], 'fully-qualified-except-typing') == "tuple[str, ...]" + assert stringify_annotation(tuple[str, ...], "smart") == "tuple[str, ...]" + + assert stringify_annotation(tuple[()], 'fully-qualified-except-typing') == "tuple[()]" + assert stringify_annotation(tuple[()], "smart") == "tuple[()]" + + assert stringify_annotation(list[dict[str, tuple]], 'fully-qualified-except-typing') == "list[dict[str, tuple]]" + assert 
stringify_annotation(list[dict[str, tuple]], "smart") == "list[dict[str, tuple]]" + + assert stringify_annotation(MyList[tuple[int, int]], 'fully-qualified-except-typing') == "tests.test_util_typing.MyList[tuple[int, int]]" + assert stringify_annotation(MyList[tuple[int, int]], "fully-qualified") == "tests.test_util_typing.MyList[tuple[int, int]]" + assert stringify_annotation(MyList[tuple[int, int]], "smart") == "~tests.test_util_typing.MyList[tuple[int, int]]" + + assert stringify_annotation(type[int], 'fully-qualified-except-typing') == "type[int]" + assert stringify_annotation(type[int], "smart") == "type[int]" + + # Mix typing and pep 585 + assert stringify_annotation(tuple[List[dict[int, str]], str, ...], 'fully-qualified-except-typing') == "tuple[List[dict[int, str]], str, ...]" + assert stringify_annotation(tuple[List[dict[int, str]], str, ...], "smart") == "tuple[~typing.List[dict[int, str]], str, ...]" + + +def test_stringify_Annotated(): + from typing import Annotated # type: ignore[attr-defined] + assert stringify_annotation(Annotated[str, "foo", "bar"], 'fully-qualified-except-typing') == "str" + assert stringify_annotation(Annotated[str, "foo", "bar"], "smart") == "str" + + +def test_stringify_type_hints_string(): + assert stringify_annotation("int", 'fully-qualified-except-typing') == "int" + assert stringify_annotation("int", 'fully-qualified') == "int" + assert stringify_annotation("int", "smart") == "int" + + assert stringify_annotation("str", 'fully-qualified-except-typing') == "str" + assert stringify_annotation("str", 'fully-qualified') == "str" + assert stringify_annotation("str", "smart") == "str" + + assert stringify_annotation(List["int"], 'fully-qualified-except-typing') == "List[int]" + assert stringify_annotation(List["int"], 'fully-qualified') == "typing.List[int]" + assert stringify_annotation(List["int"], "smart") == "~typing.List[int]" + + assert stringify_annotation(list["int"], 'fully-qualified-except-typing') == "list[int]" + assert stringify_annotation(list["int"], 'fully-qualified') == "list[int]" + assert stringify_annotation(list["int"], "smart") == "list[int]" + + assert stringify_annotation("Tuple[str]", 'fully-qualified-except-typing') == "Tuple[str]" + assert stringify_annotation("Tuple[str]", 'fully-qualified') == "Tuple[str]" + assert stringify_annotation("Tuple[str]", "smart") == "Tuple[str]" + + assert stringify_annotation("tuple[str]", 'fully-qualified-except-typing') == "tuple[str]" + assert stringify_annotation("tuple[str]", 'fully-qualified') == "tuple[str]" + assert stringify_annotation("tuple[str]", "smart") == "tuple[str]" + + assert stringify_annotation("unknown", 'fully-qualified-except-typing') == "unknown" + assert stringify_annotation("unknown", 'fully-qualified') == "unknown" + assert stringify_annotation("unknown", "smart") == "unknown" + + +def test_stringify_type_hints_Callable(): + assert stringify_annotation(Callable, 'fully-qualified-except-typing') == "Callable" + assert stringify_annotation(Callable, "fully-qualified") == "typing.Callable" + assert stringify_annotation(Callable, "smart") == "~typing.Callable" + + assert stringify_annotation(Callable[[str], int], 'fully-qualified-except-typing') == "Callable[[str], int]" + assert stringify_annotation(Callable[[str], int], "fully-qualified") == "typing.Callable[[str], int]" + assert stringify_annotation(Callable[[str], int], "smart") == "~typing.Callable[[str], int]" + + assert stringify_annotation(Callable[..., int], 'fully-qualified-except-typing') == "Callable[[...], 
int]" + assert stringify_annotation(Callable[..., int], "fully-qualified") == "typing.Callable[[...], int]" + assert stringify_annotation(Callable[..., int], "smart") == "~typing.Callable[[...], int]" + + +def test_stringify_type_hints_Union(): + assert stringify_annotation(Optional[int], 'fully-qualified-except-typing') == "int | None" + assert stringify_annotation(Optional[int], "fully-qualified") == "int | None" + assert stringify_annotation(Optional[int], "smart") == "int | None" + + assert stringify_annotation(Union[str, None], 'fully-qualified-except-typing') == "str | None" + assert stringify_annotation(Union[str, None], "fully-qualified") == "str | None" + assert stringify_annotation(Union[str, None], "smart") == "str | None" + + assert stringify_annotation(Union[int, str], 'fully-qualified-except-typing') == "int | str" + assert stringify_annotation(Union[int, str], "fully-qualified") == "int | str" + assert stringify_annotation(Union[int, str], "smart") == "int | str" + + assert stringify_annotation(Union[int, Integral], 'fully-qualified-except-typing') == "int | numbers.Integral" + assert stringify_annotation(Union[int, Integral], "fully-qualified") == "int | numbers.Integral" + assert stringify_annotation(Union[int, Integral], "smart") == "int | ~numbers.Integral" + + assert (stringify_annotation(Union[MyClass1, MyClass2], 'fully-qualified-except-typing') == + "tests.test_util_typing.MyClass1 | tests.test_util_typing.<MyClass2>") + assert (stringify_annotation(Union[MyClass1, MyClass2], "fully-qualified") == + "tests.test_util_typing.MyClass1 | tests.test_util_typing.<MyClass2>") + assert (stringify_annotation(Union[MyClass1, MyClass2], "smart") == + "~tests.test_util_typing.MyClass1 | ~tests.test_util_typing.<MyClass2>") + + +def test_stringify_type_hints_typevars(): + T = TypeVar('T') + T_co = TypeVar('T_co', covariant=True) + T_contra = TypeVar('T_contra', contravariant=True) + + assert stringify_annotation(T, 'fully-qualified-except-typing') == "tests.test_util_typing.T" + assert stringify_annotation(T, "smart") == "~tests.test_util_typing.T" + + assert stringify_annotation(T_co, 'fully-qualified-except-typing') == "tests.test_util_typing.T_co" + assert stringify_annotation(T_co, "smart") == "~tests.test_util_typing.T_co" + + assert stringify_annotation(T_contra, 'fully-qualified-except-typing') == "tests.test_util_typing.T_contra" + assert stringify_annotation(T_contra, "smart") == "~tests.test_util_typing.T_contra" + + assert stringify_annotation(List[T], 'fully-qualified-except-typing') == "List[tests.test_util_typing.T]" + assert stringify_annotation(List[T], "smart") == "~typing.List[~tests.test_util_typing.T]" + + assert stringify_annotation(list[T], 'fully-qualified-except-typing') == "list[tests.test_util_typing.T]" + assert stringify_annotation(list[T], "smart") == "list[~tests.test_util_typing.T]" + + if sys.version_info[:2] >= (3, 10): + assert stringify_annotation(MyInt, 'fully-qualified-except-typing') == "tests.test_util_typing.MyInt" + assert stringify_annotation(MyInt, "smart") == "~tests.test_util_typing.MyInt" + else: + assert stringify_annotation(MyInt, 'fully-qualified-except-typing') == "MyInt" + assert stringify_annotation(MyInt, "smart") == "MyInt" + + +def test_stringify_type_hints_custom_class(): + assert stringify_annotation(MyClass1, 'fully-qualified-except-typing') == "tests.test_util_typing.MyClass1" + assert stringify_annotation(MyClass1, "smart") == "~tests.test_util_typing.MyClass1" + + assert stringify_annotation(MyClass2, 
'fully-qualified-except-typing') == "tests.test_util_typing.<MyClass2>" + assert stringify_annotation(MyClass2, "smart") == "~tests.test_util_typing.<MyClass2>" + + +def test_stringify_type_hints_alias(): + MyStr = str + MyTuple = Tuple[str, str] + + assert stringify_annotation(MyStr, 'fully-qualified-except-typing') == "str" + assert stringify_annotation(MyStr, "smart") == "str" + + assert stringify_annotation(MyTuple) == "Tuple[str, str]" # type: ignore[attr-defined] + assert stringify_annotation(MyTuple, "smart") == "~typing.Tuple[str, str]" # type: ignore[attr-defined] + + +def test_stringify_type_Literal(): + from typing import Literal # type: ignore[attr-defined] + assert stringify_annotation(Literal[1, "2", "\r"], 'fully-qualified-except-typing') == "Literal[1, '2', '\\r']" + assert stringify_annotation(Literal[1, "2", "\r"], "fully-qualified") == "typing.Literal[1, '2', '\\r']" + assert stringify_annotation(Literal[1, "2", "\r"], "smart") == "~typing.Literal[1, '2', '\\r']" + + assert stringify_annotation(Literal[MyEnum.a], 'fully-qualified-except-typing') == 'Literal[tests.test_util_typing.MyEnum.a]' + assert stringify_annotation(Literal[MyEnum.a], 'fully-qualified') == 'typing.Literal[tests.test_util_typing.MyEnum.a]' + assert stringify_annotation(Literal[MyEnum.a], 'smart') == '~typing.Literal[MyEnum.a]' + + +@pytest.mark.skipif(sys.version_info[:2] <= (3, 9), reason='python 3.10+ is required.') +def test_stringify_type_union_operator(): + assert stringify_annotation(int | None) == "int | None" # type: ignore[attr-defined] + assert stringify_annotation(int | None, "smart") == "int | None" # type: ignore[attr-defined] + + assert stringify_annotation(int | str) == "int | str" # type: ignore[attr-defined] + assert stringify_annotation(int | str, "smart") == "int | str" # type: ignore[attr-defined] + + assert stringify_annotation(int | str | None) == "int | str | None" # type: ignore[attr-defined] + assert stringify_annotation(int | str | None, "smart") == "int | str | None" # type: ignore[attr-defined] + + assert stringify_annotation(int | tuple[dict[str, int | None], list[int | str]] | None) == "int | tuple[dict[str, int | None], list[int | str]] | None" # type: ignore[attr-defined] + assert stringify_annotation(int | tuple[dict[str, int | None], list[int | str]] | None, "smart") == "int | tuple[dict[str, int | None], list[int | str]] | None" # type: ignore[attr-defined] + + assert stringify_annotation(int | Struct) == "int | struct.Struct" # type: ignore[attr-defined] + assert stringify_annotation(int | Struct, "smart") == "int | ~struct.Struct" # type: ignore[attr-defined] + + +def test_stringify_broken_type_hints(): + assert stringify_annotation(BrokenType, 'fully-qualified-except-typing') == 'tests.test_util_typing.BrokenType' + assert stringify_annotation(BrokenType, "smart") == '~tests.test_util_typing.BrokenType' + + +def test_stringify_mock(): + with mock(['unknown']): + import unknown + assert stringify_annotation(unknown, 'fully-qualified-except-typing') == 'unknown' + assert stringify_annotation(unknown.secret.Class, 'fully-qualified-except-typing') == 'unknown.secret.Class' + assert stringify_annotation(unknown.secret.Class, "smart") == 'unknown.secret.Class' + + +def test_stringify_type_ForwardRef(): + from typing import ForwardRef # type: ignore[attr-defined] + + assert stringify_annotation(ForwardRef("MyInt")) == "MyInt" + assert stringify_annotation(ForwardRef("MyInt"), 'smart') == "MyInt" + + assert stringify_annotation(list[ForwardRef("MyInt")]) == "list[MyInt]" 
+ assert stringify_annotation(list[ForwardRef("MyInt")], 'smart') == "list[MyInt]" + + assert stringify_annotation(Tuple[dict[ForwardRef("MyInt"), str], list[List[int]]]) == "Tuple[dict[MyInt, str], list[List[int]]]" # type: ignore[attr-defined] + assert stringify_annotation(Tuple[dict[ForwardRef("MyInt"), str], list[List[int]]], 'fully-qualified-except-typing') == "Tuple[dict[MyInt, str], list[List[int]]]" # type: ignore[attr-defined] + assert stringify_annotation(Tuple[dict[ForwardRef("MyInt"), str], list[List[int]]], 'smart') == "~typing.Tuple[dict[MyInt, str], list[~typing.List[int]]]" # type: ignore[attr-defined] diff --git a/tests/test_versioning.py b/tests/test_versioning.py new file mode 100644 index 0000000..ae159df --- /dev/null +++ b/tests/test_versioning.py @@ -0,0 +1,120 @@ +"""Test the versioning implementation.""" + +import pickle +import shutil + +import pytest + +from sphinx.testing.util import SphinxTestApp +from sphinx.versioning import add_uids, get_ratio, merge_doctrees + +app = original = original_uids = None + + +@pytest.fixture(scope='module', autouse=True) +def _setup_module(rootdir, sphinx_test_tempdir): + global app, original, original_uids + srcdir = sphinx_test_tempdir / 'test-versioning' + if not srcdir.exists(): + shutil.copytree(rootdir / 'test-versioning', srcdir) + app = SphinxTestApp(srcdir=srcdir) + app.builder.env.app = app + app.connect('doctree-resolved', on_doctree_resolved) + app.build() + original = doctrees['original'] + original_uids = [n.uid for n in add_uids(original, is_paragraph)] + yield + app.cleanup() + + +doctrees = {} + + +def on_doctree_resolved(app, doctree, docname): + doctrees[docname] = doctree + + +def is_paragraph(node): + return node.__class__.__name__ == 'paragraph' + + +def test_get_ratio(): + assert get_ratio('', 'a') + assert get_ratio('a', '') + + +def test_add_uids(): + assert len(original_uids) == 3 + + +def test_picklablility(): + # we have to modify the doctree so we can pickle it + copy = original.copy() + copy.reporter = None + copy.transformer = None + copy.settings.warning_stream = None + copy.settings.env = None + copy.settings.record_dependencies = None + loaded = pickle.loads(pickle.dumps(copy, pickle.HIGHEST_PROTOCOL)) + assert all(getattr(n, 'uid', False) for n in loaded.findall(is_paragraph)) + + +def test_modified(): + modified = doctrees['modified'] + new_nodes = list(merge_doctrees(original, modified, is_paragraph)) + uids = [n.uid for n in modified.findall(is_paragraph)] + assert not new_nodes + assert original_uids == uids + + +def test_added(): + added = doctrees['added'] + new_nodes = list(merge_doctrees(original, added, is_paragraph)) + uids = [n.uid for n in added.findall(is_paragraph)] + assert len(new_nodes) == 1 + assert original_uids == uids[:-1] + + +def test_deleted(): + deleted = doctrees['deleted'] + new_nodes = list(merge_doctrees(original, deleted, is_paragraph)) + uids = [n.uid for n in deleted.findall(is_paragraph)] + assert not new_nodes + assert original_uids[::2] == uids + + +def test_deleted_end(): + deleted_end = doctrees['deleted_end'] + new_nodes = list(merge_doctrees(original, deleted_end, is_paragraph)) + uids = [n.uid for n in deleted_end.findall(is_paragraph)] + assert not new_nodes + assert original_uids[:-1] == uids + + +def test_insert(): + insert = doctrees['insert'] + new_nodes = list(merge_doctrees(original, insert, is_paragraph)) + uids = [n.uid for n in insert.findall(is_paragraph)] + assert len(new_nodes) == 1 + assert original_uids[0] == uids[0] + assert 
original_uids[1:] == uids[2:] + + +def test_insert_beginning(): + insert_beginning = doctrees['insert_beginning'] + new_nodes = list(merge_doctrees(original, insert_beginning, is_paragraph)) + uids = [n.uid for n in insert_beginning.findall(is_paragraph)] + assert len(new_nodes) == 1 + assert len(uids) == 4 + assert original_uids == uids[1:] + assert original_uids[0] != uids[0] + + +def test_insert_similar(): + insert_similar = doctrees['insert_similar'] + new_nodes = list(merge_doctrees(original, insert_similar, is_paragraph)) + uids = [n.uid for n in insert_similar.findall(is_paragraph)] + assert len(new_nodes) == 1 + assert new_nodes[0].rawsource == 'Anyway I need more' + assert original_uids[0] == uids[0] + assert original_uids[1:] == uids[2:] diff --git a/tests/test_writer_latex.py b/tests/test_writer_latex.py new file mode 100644 index 0000000..a0ab3ee --- /dev/null +++ b/tests/test_writer_latex.py @@ -0,0 +1,28 @@ +"""Test the LaTeX writer""" + +import pytest + +from sphinx.writers.latex import rstdim_to_latexdim + + +def test_rstdim_to_latexdim(): + # Length units docutils supported + # https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#length-units + assert rstdim_to_latexdim('160em') == '160em' + assert rstdim_to_latexdim('160px') == '160\\sphinxpxdimen' + assert rstdim_to_latexdim('160in') == '160in' + assert rstdim_to_latexdim('160cm') == '160cm' + assert rstdim_to_latexdim('160mm') == '160mm' + assert rstdim_to_latexdim('160pt') == '160bp' + assert rstdim_to_latexdim('160pc') == '160pc' + assert rstdim_to_latexdim('30%') == '0.300\\linewidth' + assert rstdim_to_latexdim('160') == '160\\sphinxpxdimen' + + # float values + assert rstdim_to_latexdim('160.0em') == '160.0em' + assert rstdim_to_latexdim('.5em') == '.5em' + + # unknown values (it might be generated by 3rd party extension) + with pytest.raises(ValueError, match='could not convert string to float: '): + rstdim_to_latexdim('unknown') + assert rstdim_to_latexdim('160.0unknown') == '160.0unknown' diff --git a/tests/typing_test_data.py b/tests/typing_test_data.py new file mode 100644 index 0000000..8a7ebc4 --- /dev/null +++ b/tests/typing_test_data.py @@ -0,0 +1,129 @@ +from inspect import Signature +from numbers import Integral +from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, Union + + +def f0(x: int, y: Integral) -> None: + pass + + +def f1(x: list[int]) -> List[int]: + pass + + +T = TypeVar('T') +T_co = TypeVar('T_co', covariant=True) +T_contra = TypeVar('T_contra', contravariant=True) + + +def f2(x: List[T], y: List[T_co], z: T) -> List[T_contra]: + pass + + +def f3(x: Union[str, Integral]) -> None: + pass + + +MyStr = str + + +def f4(x: 'MyStr', y: MyStr) -> None: + pass + + +def f5(x: int, *, y: str, z: str) -> None: + pass + + +def f6(x: int, *args, y: str, z: str) -> None: + pass + + +def f7(x: int = None, y: dict = {}) -> None: # NoQA: B006 + pass + + +def f8(x: Callable[[int, str], int]) -> None: + # See https://github.com/ambv/typehinting/issues/149 for Callable[..., int] + pass + + +def f9(x: Callable) -> None: + pass + + +def f10(x: Tuple[int, str], y: Tuple[int, ...]) -> None: + pass + + +class CustomAnnotation: + def __repr__(self): + return 'CustomAnnotation' + + +def f11(x: CustomAnnotation(), y: 123) -> None: + pass + + +def f12() -> Tuple[int, str, int]: + pass + + +def f13() -> Optional[str]: + pass + + +def f14() -> Any: + pass + + +def f15(x: "Unknown", y: "int") -> Any: # noqa: F821 # type: ignore[attr-defined] + pass + + +def f16(arg1, arg2, *, arg3=None, 
arg4=None):
+    pass
+
+
+def f17(*, arg3, arg4):
+    pass
+
+
+def f18(self, arg1: Union[int, Tuple] = 10) -> List[Dict]:
+    pass
+
+
+def f19(*args: int, **kwargs: str):
+    pass
+
+
+def f20() -> Optional[Union[int, str]]:
+    pass
+
+
+def f21(arg1='whatever', arg2=Signature.empty):
+    pass
+
+
+def f22(*, a, b):
+    pass
+
+
+def f23(a, b, /, c, d):
+    pass
+
+
+def f24(a, /, *, b):
+    pass
+
+
+def f25(a, b, /):
+    pass
+
+
+class Node:
+    def __init__(self, parent: Optional['Node']) -> None:
+        pass
+
+    def children(self) -> List['Node']:
+        pass
diff --git a/tests/utils.py b/tests/utils.py
new file mode 100644
index 0000000..32636b7
--- /dev/null
+++ b/tests/utils.py
@@ -0,0 +1,55 @@
+import contextlib
+import http.server
+import pathlib
+import threading
+from ssl import PROTOCOL_TLS_SERVER, SSLContext
+
+import filelock
+
+# Generated with:
+# $ openssl req -new -x509 -days 3650 -nodes -out cert.pem \
+#   -keyout cert.pem -addext "subjectAltName = DNS:localhost"
+TESTS_ROOT = pathlib.Path(__file__).parent
+CERT_FILE = str(TESTS_ROOT / "certs" / "cert.pem")
+
+# File lock for tests
+LOCK_PATH = str(TESTS_ROOT / 'test-server.lock')
+
+
+class HttpServerThread(threading.Thread):
+    def __init__(self, handler, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.server = http.server.ThreadingHTTPServer(("localhost", 7777), handler)
+
+    def run(self):
+        self.server.serve_forever(poll_interval=0.001)
+
+    def terminate(self):
+        self.server.shutdown()
+        self.server.server_close()
+        self.join()
+
+
+class HttpsServerThread(HttpServerThread):
+    def __init__(self, handler, *args, **kwargs):
+        super().__init__(handler, *args, **kwargs)
+        sslcontext = SSLContext(PROTOCOL_TLS_SERVER)
+        sslcontext.load_cert_chain(CERT_FILE)
+        self.server.socket = sslcontext.wrap_socket(self.server.socket, server_side=True)
+
+
+def create_server(thread_class):
+    def server(handler):
+        lock = filelock.FileLock(LOCK_PATH)
+        with lock:
+            server_thread = thread_class(handler, daemon=True)
+            server_thread.start()
+            try:
+                yield server_thread
+            finally:
+                server_thread.terminate()
+    return contextlib.contextmanager(server)
+
+
+http_server = create_server(HttpServerThread)
+https_server = create_server(HttpsServerThread)
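
The http_server and https_server helpers added in tests/utils.py above are context managers: each takes a request-handler class, starts a ThreadingHTTPServer on localhost:7777 in a daemon thread, holds a file lock so only one test server runs at a time, and shuts the server down when the block exits. A minimal usage sketch follows; it is not part of the diff, and the handler class, test name and "tests.utils" import path are illustrative assumptions.

    # Hypothetical sketch of how the helpers above could be used; OkHandler,
    # the test function and the "tests.utils" import path are assumptions,
    # not code from this commit.
    import http.server
    from urllib.request import urlopen

    from tests.utils import http_server


    class OkHandler(http.server.BaseHTTPRequestHandler):
        def do_GET(self):
            # Answer every GET with a small fixed body the test can assert on.
            body = b"ok"
            self.send_response(200)
            self.send_header("Content-Length", str(len(body)))
            self.end_headers()
            self.wfile.write(body)


    def test_fetches_ok():
        # Entering the context starts the server thread; leaving it calls
        # terminate(), which shuts the server down and joins the thread.
        with http_server(OkHandler):
            with urlopen("http://localhost:7777/") as response:
                assert response.read() == b"ok"

https_server works the same way, except HttpsServerThread wraps the listening socket with the bundled self-signed localhost certificate (tests/certs/cert.pem), so a client has to be told to trust that certificate explicitly.

Most of the assertions earlier in this patch exercise restify() and stringify_annotation() from sphinx.util.typing in the three formatting modes the tests name: 'fully-qualified-except-typing' (which the unqualified calls behave like), 'fully-qualified' and 'smart'. The short sketch below only restates behaviour those assertions already encode; the commented output values are copied from the expectations above, not re-verified here.

    # Illustrative only: mirrors expectations asserted in test_util_typing.py.
    from typing import List, Optional

    from sphinx.util.typing import restify, stringify_annotation

    # restify() returns reST cross-reference markup for an annotation.
    print(restify(List[int]))                        # :py:class:`~typing.List`\ [:py:class:`int`]
    # stringify_annotation() returns plain text; "smart" shortens module paths with "~".
    print(stringify_annotation(List[int], 'smart'))  # ~typing.List[int]
    print(stringify_annotation(Optional[int]))       # int | None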