From ffcb4b87846b4e4a2d9eee8df4b7ec40365878b8 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Wed, 5 Jun 2024 18:20:58 +0200
Subject: Merging upstream version 7.3.7.

Signed-off-by: Daniel Baumann
---
 tests/test_pycode/__init__.py           |   0
 tests/test_pycode/test_pycode.py        | 173 +++++++++++
 tests/test_pycode/test_pycode_ast.py    |  68 +++++
 tests/test_pycode/test_pycode_parser.py | 511 ++++++++++++++++++++++++++++++++
 4 files changed, 752 insertions(+)
 create mode 100644 tests/test_pycode/__init__.py
 create mode 100644 tests/test_pycode/test_pycode.py
 create mode 100644 tests/test_pycode/test_pycode_ast.py
 create mode 100644 tests/test_pycode/test_pycode_parser.py

(limited to 'tests/test_pycode')

diff --git a/tests/test_pycode/__init__.py b/tests/test_pycode/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/test_pycode/test_pycode.py b/tests/test_pycode/test_pycode.py
new file mode 100644
index 0000000..5739787
--- /dev/null
+++ b/tests/test_pycode/test_pycode.py
@@ -0,0 +1,173 @@
+"""Test pycode."""
+
+import os
+import sys
+
+import pytest
+
+import sphinx
+from sphinx.errors import PycodeError
+from sphinx.pycode import ModuleAnalyzer
+
+SPHINX_MODULE_PATH = os.path.splitext(sphinx.__file__)[0] + '.py'
+
+
+def test_ModuleAnalyzer_get_module_source():
+    assert ModuleAnalyzer.get_module_source('sphinx') == (sphinx.__file__, sphinx.__loader__.get_source('sphinx'))
+
+    # failed to obtain source information from builtin modules
+    with pytest.raises(PycodeError):
+        ModuleAnalyzer.get_module_source('builtins')
+    with pytest.raises(PycodeError):
+        ModuleAnalyzer.get_module_source('itertools')
+
+
+def test_ModuleAnalyzer_for_string():
+    analyzer = ModuleAnalyzer.for_string('print("Hello world")', 'module_name')
+    assert analyzer.modname == 'module_name'
+    assert analyzer.srcname == '<string>'
+
+
+def test_ModuleAnalyzer_for_file():
+    analyzer = ModuleAnalyzer.for_string(SPHINX_MODULE_PATH, 'sphinx')
+    assert analyzer.modname == 'sphinx'
+    assert analyzer.srcname == '<string>'
+
+
+def test_ModuleAnalyzer_for_module(rootdir):
+    analyzer = ModuleAnalyzer.for_module('sphinx')
+    assert analyzer.modname == 'sphinx'
+    assert analyzer.srcname in (SPHINX_MODULE_PATH,
+                                os.path.abspath(SPHINX_MODULE_PATH))
+
+    path = str(rootdir / 'test-pycode')
+    sys.path.insert(0, path)
+    try:
+        analyzer = ModuleAnalyzer.for_module('cp_1251_coded')
+        docs = analyzer.find_attr_docs()
+        assert docs == {('', 'X'): ['It MUST look like X="\u0425"', '']}
+    finally:
+        sys.path.pop(0)
+
+
+def test_ModuleAnalyzer_find_tags():
+    code = ('class Foo(object):\n'  # line: 1
+            '    """class Foo!"""\n'
+            '    def __init__(self):\n'
+            '        pass\n'
+            '\n'
+            '    def bar(self, arg1, arg2=True, *args, **kwargs):\n'
+            '        """method Foo.bar"""\n'
+            '        pass\n'
+            '\n'
+            '    class Baz(object):\n'
+            '        def __init__(self):\n'  # line: 11
+            '            pass\n'
+            '\n'
+            'def qux():\n'
+            '    """function baz"""\n'
+            '    pass\n'
+            '\n'
+            '@decorator1\n'
+            '@decorator2\n'
+            'def quux():\n'
+            '    pass\n'  # line: 21
+            '\n'
+            'class Corge(object):\n'
+            '    @decorator1\n'
+            '    @decorator2\n'
+            '    def grault(self):\n'
+            '        pass\n')
+    analyzer = ModuleAnalyzer.for_string(code, 'module')
+    tags = analyzer.find_tags()
+    assert set(tags.keys()) == {'Foo', 'Foo.__init__', 'Foo.bar',
+                                'Foo.Baz', 'Foo.Baz.__init__', 'qux', 'quux',
+                                'Corge', 'Corge.grault'}
+    assert tags['Foo'] == ('class', 1, 12)  # type, start, end
+    assert tags['Foo.__init__'] == ('def', 3, 4)
+    assert tags['Foo.bar'] == ('def', 6, 8)
+    assert tags['Foo.Baz'] == ('class', 10, 12)
+    assert tags['Foo.Baz.__init__'] == ('def', 11, 12)
+    assert tags['qux'] == ('def', 14, 16)
+    assert tags['quux'] == ('def', 18, 21)
+    assert tags['Corge'] == ('class', 23, 27)
+    assert tags['Corge.grault'] == ('def', 24, 27)
+
+
+def test_ModuleAnalyzer_find_attr_docs():
+    code = ('class Foo(object):\n'
+            '    """class Foo!"""\n'
+            '    #: comment before attr1\n'
+            '    attr1 = None\n'
+            '    attr2 = None  # attribute comment for attr2 (without colon)\n'
+            '    attr3 = None  #: attribute comment for attr3\n'
+            '    attr4 = None  #: long attribute comment\n'
+            '                  #: for attr4\n'
+            '    #: comment before attr5\n'
+            '    attr5 = None  #: attribute comment for attr5\n'
+            '    attr6, attr7 = 1, 2  #: this comment is ignored\n'
+            '\n'
+            '    def __init__(self):\n'
+            '        self.attr8 = None  #: first attribute comment (ignored)\n'
+            '        self.attr8 = None  #: attribute comment for attr8\n'
+            '        #: comment before attr9\n'
+            '        self.attr9 = None  #: comment after attr9\n'
+            '        "string after attr9"\n'
+            '\n'
+            '    def bar(self, arg1, arg2=True, *args, **kwargs):\n'
+            '        """method Foo.bar"""\n'
+            '        pass\n'
+            '\n'
+            'def baz():\n'
+            '    """function baz"""\n'
+            '    pass\n'
+            '\n'
+            'class Qux: attr1 = 1; attr2 = 2')
+    analyzer = ModuleAnalyzer.for_string(code, 'module')
+    docs = analyzer.find_attr_docs()
+    assert set(docs) == {('Foo', 'attr1'),
+                         ('Foo', 'attr3'),
+                         ('Foo', 'attr4'),
+                         ('Foo', 'attr5'),
+                         ('Foo', 'attr6'),
+                         ('Foo', 'attr7'),
+                         ('Foo', 'attr8'),
+                         ('Foo', 'attr9')}
+    assert docs[('Foo', 'attr1')] == ['comment before attr1', '']
+    assert docs[('Foo', 'attr3')] == ['attribute comment for attr3', '']
+    assert docs[('Foo', 'attr4')] == ['long attribute comment', '']
+    assert docs[('Foo', 'attr4')] == ['long attribute comment', '']
+    assert docs[('Foo', 'attr5')] == ['attribute comment for attr5', '']
+    assert docs[('Foo', 'attr6')] == ['this comment is ignored', '']
+    assert docs[('Foo', 'attr7')] == ['this comment is ignored', '']
+    assert docs[('Foo', 'attr8')] == ['attribute comment for attr8', '']
+    assert docs[('Foo', 'attr9')] == ['string after attr9', '']
+    assert analyzer.tagorder == {'Foo': 0,
+                                 'Foo.__init__': 8,
+                                 'Foo.attr1': 1,
+                                 'Foo.attr2': 2,
+                                 'Foo.attr3': 3,
+                                 'Foo.attr4': 4,
+                                 'Foo.attr5': 5,
+                                 'Foo.attr6': 6,
+                                 'Foo.attr7': 7,
+                                 'Foo.attr8': 10,
+                                 'Foo.attr9': 12,
+                                 'Foo.bar': 13,
+                                 'baz': 14,
+                                 'Qux': 15,
+                                 'Qux.attr1': 16,
+                                 'Qux.attr2': 17}
+
+
+def test_ModuleAnalyzer_find_attr_docs_for_posonlyargs_method():
+    code = ('class Foo(object):\n'
+            '    def __init__(self, /):\n'
+            '        self.attr = None  #: attribute comment\n')
+    analyzer = ModuleAnalyzer.for_string(code, 'module')
+    docs = analyzer.find_attr_docs()
+    assert set(docs) == {('Foo', 'attr')}
+    assert docs[('Foo', 'attr')] == ['attribute comment', '']
+    assert analyzer.tagorder == {'Foo': 0,
+                                 'Foo.__init__': 1,
+                                 'Foo.attr': 2}
diff --git a/tests/test_pycode/test_pycode_ast.py b/tests/test_pycode/test_pycode_ast.py
new file mode 100644
index 0000000..1ed43e1
--- /dev/null
+++ b/tests/test_pycode/test_pycode_ast.py
@@ -0,0 +1,68 @@
+"""Test pycode.ast"""
+
+import ast
+
+import pytest
+
+from sphinx.pycode.ast import unparse as ast_unparse
+
+
+@pytest.mark.parametrize(('source', 'expected'), [
+    ("a + b", "a + b"),                         # Add
+    ("a and b", "a and b"),                     # And
+    ("os.path", "os.path"),                     # Attribute
+    ("1 * 2", "1 * 2"),                         # BinOp
+    ("a & b", "a & b"),                         # BitAnd
+    ("a | b", "a | b"),                         # BitOr
+    ("a ^ b", "a ^ b"),                         # BitXor
+    ("a and b and c", "a and b and c"),         # BoolOp
+    ("b'bytes'", "b'bytes'"),                   # Bytes
+    ("object()", "object()"),                   # Call
+    ("1234", "1234"),                           # Constant, Num
+    ("{'key1': 'value1', 'key2': 'value2'}",
+     "{'key1': 'value1', 'key2': 'value2'}"),   # Dict
+    ("a / b", "a / b"),                         # Div
+    ("...", "..."),                             # Ellipsis
+    ("a // b", "a // b"),                       # FloorDiv
+    ("Tuple[int, int]", "Tuple[int, int]"),     # Index, Subscript
+    ("~1", "~1"),                               # Invert
+    ("lambda x, y: x + y",
+     "lambda x, y: ..."),                       # Lambda
+    ("[1, 2, 3]", "[1, 2, 3]"),                 # List
+    ("a << b", "a << b"),                       # LShift
+    ("a @ b", "a @ b"),                         # MatMult
+    ("a % b", "a % b"),                         # Mod
+    ("a * b", "a * b"),                         # Mult
+    ("sys", "sys"),                             # Name, NameConstant
+    ("not a", "not a"),                         # Not
+    ("a or b", "a or b"),                       # Or
+    ("a**b", "a**b"),                           # Pow
+    ("a >> b", "a >> b"),                       # RShift
+    ("{1, 2, 3}", "{1, 2, 3}"),                 # Set
+    ("a - b", "a - b"),                         # Sub
+    ("'str'", "'str'"),                         # Str
+    ("+a", "+a"),                               # UAdd
+    ("-1", "-1"),                               # UnaryOp
+    ("-a", "-a"),                               # USub
+    ("(1, 2, 3)", "(1, 2, 3)"),                 # Tuple
+    ("()", "()"),                               # Tuple (empty)
+    ("(1,)", "(1,)"),                           # Tuple (single item)
+    ("lambda x=0, /, y=1, *args, z, **kwargs: x + y + z",
+     "lambda x=0, /, y=1, *args, z, **kwargs: ..."),    # posonlyargs
+    ("0x1234", "0x1234"),                       # Constant
+    ("1_000_000", "1_000_000"),                 # Constant
+    ("Tuple[:,:]", "Tuple[:, :]"),              # Index, Subscript, 2x Slice
+    ("Tuple[1:2]", "Tuple[1:2]"),               # Index, Subscript, Slice(no-step)
+    ("Tuple[1:2:3]", "Tuple[1:2:3]"),           # Index, Subscript, Slice
+    ("x[:, np.newaxis, :, :]",
+     "x[:, np.newaxis, :, :]"),                 # Index, Subscript, numpy extended syntax
+    ("y[:, 1:3][np.array([0, 2, 4]), :]",
+     "y[:, 1:3][np.array([0, 2, 4]), :]"),      # Index, 2x Subscript, numpy extended syntax
+])
+def test_unparse(source, expected):
+    module = ast.parse(source)
+    assert ast_unparse(module.body[0].value, source) == expected
+
+
+def test_unparse_None():
+    assert ast_unparse(None) is None
diff --git a/tests/test_pycode/test_pycode_parser.py b/tests/test_pycode/test_pycode_parser.py
new file mode 100644
index 0000000..fde648d
--- /dev/null
+++ b/tests/test_pycode/test_pycode_parser.py
@@ -0,0 +1,511 @@
+"""Test pycode.parser."""
+
+from sphinx.pycode.parser import Parser
+from sphinx.util.inspect import signature_from_str
+
+
+def test_comment_picker_basic():
+    source = ('a = 1 + 1  #: assignment\n'
+              'b = 1 +\\\n 1  #: assignment including a CR\n'
+              'c = (1 +\n 1)  #: tuple \n'
+              'd = {1, \n 1}  #:  set\n'
+              'e = [1, \n 1]  #: list #: additional comment\n'
+              'f = "abc"\n'
+              '#: string; comment on next line (ignored)\n'
+              'g = 1.0\n'
+              '"""float; string on next line"""\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('', 'a'): 'assignment',
+                               ('', 'b'): 'assignment including a CR',
+                               ('', 'c'): 'tuple ',
+                               ('', 'd'): ' set',
+                               ('', 'e'): 'list #: additional comment',
+                               ('', 'g'): 'float; string on next line'}
+
+
+def test_comment_picker_location():
+    # multiple "before" comments
+    source = ('#: comment before assignment1\n'
+              '#:\n'
+              '#: comment before assignment2\n'
+              'a = 1 + 1\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('', 'a'): ('comment before assignment1\n'
+                                           '\n'
+                                           'comment before assignment2')}
+
+    # before and after comments
+    source = ('#: comment before assignment\n'
+              'a = 1 + 1  #: comment after assignment\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('', 'a'): 'comment after assignment'}
+
+    # after comment and next line string
+    source = ('a = 1 + 1\n #: comment after assignment\n'
+              '"""string on next line"""\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('', 'a'): 'string on next line'}
+
+    # before comment and next line string
+    source = ('#: comment before assignment\n'
+              'a = 1 + 1\n'
+              '"""string on next line"""\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('', 'a'): 'string on next line'}
+
+    # before comment, after comment and next line string
+    source = ('#: comment before assignment\n'
+              'a = 1 + 1  #: comment after assignment\n'
+              '"""string on next line"""\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('', 'a'): 'string on next line'}
+
+    # inside __init__ method
+    source = ('class Foo(object):\n'
+              '    def __init__(self):\n'
+              '        #: comment before assignment\n'
+              '        self.attr1 = None\n'
+              '        self.attr2 = None  #: comment after assignment\n'
+              '\n'
+              '        #: comment for attr3(1)\n'
+              '        self.attr3 = None  #: comment for attr3(2)\n'
+              '        """comment for attr3(3)"""\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('Foo', 'attr1'): 'comment before assignment',
+                               ('Foo', 'attr2'): 'comment after assignment',
+                               ('Foo', 'attr3'): 'comment for attr3(3)'}
+
+
+def test_annotated_assignment():
+    source = ('a: str = "Sphinx"  #: comment\n'
+              'b: int = 1\n'
+              '"""string on next line"""\n'
+              'c: int  #: comment\n'
+              'd = 1  # type: int\n'
+              '"""string on next line"""\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('', 'a'): 'comment',
+                               ('', 'b'): 'string on next line',
+                               ('', 'c'): 'comment',
+                               ('', 'd'): 'string on next line'}
+    assert parser.annotations == {('', 'a'): 'str',
+                                  ('', 'b'): 'int',
+                                  ('', 'c'): 'int',
+                                  ('', 'd'): 'int'}
+    assert parser.definitions == {}
+
+
+def test_complex_assignment():
+    source = ('a = 1 + 1; b = a  #: compound statement\n'
+              'c, d = (1, 1)  #: unpack assignment\n'
+              'e = True  #: first assignment\n'
+              'e = False  #: second assignment\n'
+              'f = g = None  #: multiple assignment at once\n'
+              '(theta, phi) = (0, 0.5)  #: unpack assignment via tuple\n'
+              '[x, y] = (5, 6)  #: unpack assignment via list\n'
+              'h, *i, j = (1, 2, 3, 4)  #: unpack assignment2\n'
+              'k, *self.attr = (5, 6, 7)  #: unpack assignment3\n'
+              'l, *m[0] = (8, 9, 0)  #: unpack assignment4\n'
+              )
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('', 'b'): 'compound statement',
+                               ('', 'c'): 'unpack assignment',
+                               ('', 'd'): 'unpack assignment',
+                               ('', 'e'): 'second assignment',
+                               ('', 'f'): 'multiple assignment at once',
+                               ('', 'g'): 'multiple assignment at once',
+                               ('', 'theta'): 'unpack assignment via tuple',
+                               ('', 'phi'): 'unpack assignment via tuple',
+                               ('', 'x'): 'unpack assignment via list',
+                               ('', 'y'): 'unpack assignment via list',
+                               ('', 'h'): 'unpack assignment2',
+                               ('', 'i'): 'unpack assignment2',
+                               ('', 'j'): 'unpack assignment2',
+                               ('', 'k'): 'unpack assignment3',
+                               ('', 'l'): 'unpack assignment4',
+                               }
+    assert parser.definitions == {}
+
+
+def test_assignment_in_try_clause():
+    source = ('try:\n'
+              '    a = None  #: comment\n'
+              'except:\n'
+              '    b = None  #: ignored\n'
+              'else:\n'
+              '    c = None  #: comment\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('', 'a'): 'comment',
+                               ('', 'c'): 'comment'}
+    assert parser.deforders == {'a': 0,
+                                'c': 1}
+
+
+def test_obj_assignment():
+    source = ('obj = SomeObject()  #: some object\n'
+              'obj.attr = 1  #: attr1\n'
+              'obj.attr.attr = 1  #: attr2\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('', 'obj'): 'some object'}
+    assert parser.definitions == {}
+
+
+def test_container_assignment():
+    source = ('l = []  #: list\n'
+              'l[1] = True  #: list assignment\n'
+              'l[0:0] = []  #: list assignment\n'
+              'l[_from:_to] = []  #: list assignment\n'
+              'd = {}  #: dict\n'
+              'd["doc"] = 1  #: dict assignment\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('', 'l'): 'list',
+                               ('', 'd'): 'dict'}
+    assert parser.definitions == {}
+
+
+def test_function():
+    source = ('def some_function():\n'
+              '    """docstring"""\n'
+              '    a = 1 + 1  #: comment1\n'
+              '\n'
+              '    b = a  #: comment2\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {}
+    assert parser.definitions == {'some_function': ('def', 1, 5)}
+    assert parser.deforders == {'some_function': 0}
+
+
+def test_nested_function():
+    source = ('def some_function():\n'
+              '    a = 1 + 1  #: comment1\n'
+              '\n'
+              '    def inner_function():\n'
+              '        b = 1 + 1  #: comment2\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {}
+    assert parser.definitions == {'some_function': ('def', 1, 5)}
+    assert parser.deforders == {'some_function': 0}
+
+
+def test_class():
+    source = ('class Foo(object):\n'
+              '    attr1 = None  #: comment1\n'
+              '    attr2 = None  #: comment2\n'
+              '\n'
+              '    def __init__(self):\n'
+              '        self.a = 1 + 1  #: comment3\n'
+              '        self.attr2 = 1 + 1  #: overridden\n'
+              '        b = 1 + 1  #: comment5\n'
+              '\n'
+              '    def some_method(self):\n'
+              '        c = 1 + 1  #: comment6\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('Foo', 'attr1'): 'comment1',
+                               ('Foo', 'a'): 'comment3',
+                               ('Foo', 'attr2'): 'overridden'}
+    assert parser.definitions == {'Foo': ('class', 1, 11),
+                                  'Foo.__init__': ('def', 5, 8),
+                                  'Foo.some_method': ('def', 10, 11)}
+    assert parser.deforders == {'Foo': 0,
+                                'Foo.attr1': 1,
+                                'Foo.__init__': 3,
+                                'Foo.a': 4,
+                                'Foo.attr2': 5,
+                                'Foo.some_method': 6}
+
+
+def test_class_uses_non_self():
+    source = ('class Foo(object):\n'
+              '    def __init__(this):\n'
+              '        this.a = 1 + 1  #: comment\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('Foo', 'a'): 'comment'}
+    assert parser.definitions == {'Foo': ('class', 1, 3),
+                                  'Foo.__init__': ('def', 2, 3)}
+    assert parser.deforders == {'Foo': 0,
+                                'Foo.__init__': 1,
+                                'Foo.a': 2}
+
+
+def test_nested_class():
+    source = ('class Foo(object):\n'
+              '    attr1 = None  #: comment1\n'
+              '\n'
+              '    class Bar(object):\n'
+              '        attr2 = None  #: comment2\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('Foo', 'attr1'): 'comment1',
+                               ('Foo.Bar', 'attr2'): 'comment2'}
+    assert parser.definitions == {'Foo': ('class', 1, 5),
+                                  'Foo.Bar': ('class', 4, 5)}
+    assert parser.deforders == {'Foo': 0,
+                                'Foo.attr1': 1,
+                                'Foo.Bar': 2,
+                                'Foo.Bar.attr2': 3}
+
+
+def test_class_comment():
+    source = ('import logging\n'
+              'logger = logging.getLogger(__name__)\n'
+              '\n'
+              'class Foo(object):\n'
+              '    """Bar"""\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {}
+    assert parser.definitions == {'Foo': ('class', 4, 5)}
+
+
+def test_comment_picker_multiline_string():
+    source = ('class Foo(object):\n'
+              '    a = None\n'
+              '    """multiline\n'
+              '    docstring\n'
+              '    """\n'
+              '    b = None\n'
+              '    """\n'
+              '    docstring\n'
+              '    starts with::\n'
+              '\n'
+              '        empty line"""\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('Foo', 'a'): 'multiline\ndocstring',
+                               ('Foo', 'b'): 'docstring\nstarts with::\n\n    empty line'}
+
+
+def test_decorators():
+    source = ('@deco\n'
+              'def func1(): pass\n'
+              '\n'
+              '@deco(param1, param2)\n'
+              'def func2(): pass\n'
+              '\n'
+              '@deco1\n'
+              '@deco2\n'
+              'def func3(): pass\n'
+              '\n'
+              '@deco\n'
+              'class Foo():\n'
+              '    @deco1\n'
+              '    @deco2\n'
+              '    def method(self): pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.definitions == {'func1': ('def', 1, 2),
+                                  'func2': ('def', 4, 5),
+                                  'func3': ('def', 7, 9),
+                                  'Foo': ('class', 11, 15),
+                                  'Foo.method': ('def', 13, 15)}
+
+
+def test_async_function_and_method():
+    source = ('async def some_function():\n'
+              '    """docstring"""\n'
+              '    a = 1 + 1  #: comment1\n'
+              '\n'
+              'class Foo:\n'
+              '    async def method(self):\n'
+              '        pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.definitions == {'some_function': ('def', 1, 3),
+                                  'Foo': ('class', 5, 7),
+                                  'Foo.method': ('def', 6, 7)}
+
+
+def test_imports():
+    source = ('import sys\n'
+              'from os import environment, path\n'
+              '\n'
+              'import sphinx as Sphinx\n'
+              'from sphinx.application import Sphinx as App\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.definitions == {}
+    assert parser.deforders == {'sys': 0,
+                                'environment': 1,
+                                'path': 2,
+                                'Sphinx': 3,
+                                'App': 4}
+
+
+def test_formfeed_char():
+    source = ('class Foo:\n'
+              '\f\n'
+              '    attr = 1234  #: comment\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.comments == {('Foo', 'attr'): 'comment'}
+
+
+def test_typing_final():
+    source = ('import typing\n'
+              '\n'
+              '@typing.final\n'
+              'def func(): pass\n'
+              '\n'
+              '@typing.final\n'
+              'class Foo:\n'
+              '    @typing.final\n'
+              '    def meth(self):\n'
+              '        pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.finals == ['func', 'Foo', 'Foo.meth']
+
+
+def test_typing_final_from_import():
+    source = ('from typing import final\n'
+              '\n'
+              '@final\n'
+              'def func(): pass\n'
+              '\n'
+              '@final\n'
+              'class Foo:\n'
+              '    @final\n'
+              '    def meth(self):\n'
+              '        pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.finals == ['func', 'Foo', 'Foo.meth']
+
+
+def test_typing_final_import_as():
+    source = ('import typing as foo\n'
+              '\n'
+              '@foo.final\n'
+              'def func(): pass\n'
+              '\n'
+              '@foo.final\n'
+              'class Foo:\n'
+              '    @typing.final\n'
+              '    def meth(self):\n'
+              '        pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.finals == ['func', 'Foo']
+
+
+def test_typing_final_from_import_as():
+    source = ('from typing import final as bar\n'
+              '\n'
+              '@bar\n'
+              'def func(): pass\n'
+              '\n'
+              '@bar\n'
+              'class Foo:\n'
+              '    @final\n'
+              '    def meth(self):\n'
+              '        pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.finals == ['func', 'Foo']
+
+
+def test_typing_final_not_imported():
+    source = ('@typing.final\n'
+              'def func(): pass\n'
+              '\n'
+              '@typing.final\n'
+              'class Foo:\n'
+              '    @final\n'
+              '    def meth(self):\n'
+              '        pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.finals == []
+
+
+def test_typing_overload():
+    source = ('import typing\n'
+              '\n'
+              '@typing.overload\n'
+              'def func(x: int, y: int) -> int: pass\n'
+              '\n'
+              '@typing.overload\n'
+              'def func(x: str, y: str) -> str: pass\n'
+              '\n'
+              'def func(x, y): pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.overloads == {'func': [signature_from_str('(x: int, y: int) -> int'),
+                                         signature_from_str('(x: str, y: str) -> str')]}
+
+
+def test_typing_overload_from_import():
+    source = ('from typing import overload\n'
+              '\n'
+              '@overload\n'
+              'def func(x: int, y: int) -> int: pass\n'
+              '\n'
+              '@overload\n'
+              'def func(x: str, y: str) -> str: pass\n'
+              '\n'
+              'def func(x, y): pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.overloads == {'func': [signature_from_str('(x: int, y: int) -> int'),
+                                         signature_from_str('(x: str, y: str) -> str')]}
+
+
+def test_typing_overload_import_as():
+    source = ('import typing as foo\n'
+              '\n'
+              '@foo.overload\n'
+              'def func(x: int, y: int) -> int: pass\n'
+              '\n'
+              '@foo.overload\n'
+              'def func(x: str, y: str) -> str: pass\n'
+              '\n'
+              'def func(x, y): pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.overloads == {'func': [signature_from_str('(x: int, y: int) -> int'),
+                                         signature_from_str('(x: str, y: str) -> str')]}
+
+
+def test_typing_overload_from_import_as():
+    source = ('from typing import overload as bar\n'
+              '\n'
+              '@bar\n'
+              'def func(x: int, y: int) -> int: pass\n'
+              '\n'
+              '@bar\n'
+              'def func(x: str, y: str) -> str: pass\n'
+              '\n'
+              'def func(x, y): pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.overloads == {'func': [signature_from_str('(x: int, y: int) -> int'),
+                                         signature_from_str('(x: str, y: str) -> str')]}
+
+
+def test_typing_overload_not_imported():
+    source = ('@typing.final\n'
+              'def func(x: int, y: int) -> int: pass\n'
+              '\n'
+              '@typing.final\n'
+              'def func(x: str, y: str) -> str: pass\n'
+              '\n'
+              'def func(x, y): pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.overloads == {}
--
cgit v1.2.3