Diffstat (limited to 'test/sanity')
36 files changed, 1700 insertions, 0 deletions
diff --git a/test/sanity/code-smell/ansible-requirements.json b/test/sanity/code-smell/ansible-requirements.json new file mode 100644 index 0000000..b4b7f2b --- /dev/null +++ b/test/sanity/code-smell/ansible-requirements.json @@ -0,0 +1,7 @@ +{ + "prefixes": [ + "requirements.txt", + "test/lib/ansible_test/_data/requirements/ansible.txt" + ], + "output": "path-line-column-message" +} diff --git a/test/sanity/code-smell/ansible-requirements.py b/test/sanity/code-smell/ansible-requirements.py new file mode 100644 index 0000000..4d1a652 --- /dev/null +++ b/test/sanity/code-smell/ansible-requirements.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +import re +import sys + + +def read_file(path): + try: + with open(path, 'r') as f: + return f.read() + except Exception as ex: # pylint: disable=broad-except + print('%s:%d:%d: unable to read required file %s' % (path, 0, 0, re.sub(r'\s+', ' ', str(ex)))) + return None + + +def main(): + ORIGINAL_FILE = 'requirements.txt' + VENDORED_COPY = 'test/lib/ansible_test/_data/requirements/ansible.txt' + + original_requirements = read_file(ORIGINAL_FILE) + vendored_requirements = read_file(VENDORED_COPY) + + if original_requirements is not None and vendored_requirements is not None: + if original_requirements != vendored_requirements: + print('%s:%d:%d: must be identical to %s' % (VENDORED_COPY, 0, 0, ORIGINAL_FILE)) + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/ansible-test-future-boilerplate.json b/test/sanity/code-smell/ansible-test-future-boilerplate.json new file mode 100644 index 0000000..ca4c067 --- /dev/null +++ b/test/sanity/code-smell/ansible-test-future-boilerplate.json @@ -0,0 +1,10 @@ +{ + "extensions": [ + ".py" + ], + "prefixes": [ + "test/sanity/", + "test/lib/ansible_test/" + ], + "output": "path-message" +} diff --git a/test/sanity/code-smell/ansible-test-future-boilerplate.py b/test/sanity/code-smell/ansible-test-future-boilerplate.py new file mode 100644 index 0000000..9a62225 --- /dev/null +++ b/test/sanity/code-smell/ansible-test-future-boilerplate.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +import ast +import sys + + +def main(): + # The following directories contain code which must work under Python 2.x. 
+ py2_compat = ( + 'test/lib/ansible_test/_util/target/', + ) + + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + if any(path.startswith(prefix) for prefix in py2_compat): + continue + + with open(path, 'rb') as path_fd: + lines = path_fd.read().splitlines() + + missing = True + if not lines: + # Files are allowed to be empty of everything including boilerplate + missing = False + + invalid_future = [] + + for text in lines: + if text == b'from __future__ import annotations': + missing = False + break + + if text.startswith(b'from __future__ ') or text == b'__metaclass__ = type': + invalid_future.append(text.decode()) + + if missing: + with open(path) as file: + contents = file.read() + + # noinspection PyBroadException + try: + node = ast.parse(contents) + + # files consisting of only assignments have no need for future import boilerplate + # the only exception would be division during assignment, but we'll overlook that for simplicity + # the most likely case is that of a documentation only python file + if all(isinstance(statement, ast.Assign) for statement in node.body): + missing = False + except Exception: # pylint: disable=broad-except + pass # the compile sanity test will report this error + + if missing: + print('%s: missing: from __future__ import annotations' % path) + + for text in invalid_future: + print('%s: invalid: %s' % (path, text)) + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/configure-remoting-ps1.json b/test/sanity/code-smell/configure-remoting-ps1.json new file mode 100644 index 0000000..593b765 --- /dev/null +++ b/test/sanity/code-smell/configure-remoting-ps1.json @@ -0,0 +1,4 @@ +{ + "no_targets": true, + "output": "path-message" +} diff --git a/test/sanity/code-smell/configure-remoting-ps1.py b/test/sanity/code-smell/configure-remoting-ps1.py new file mode 100644 index 0000000..fe67800 --- /dev/null +++ b/test/sanity/code-smell/configure-remoting-ps1.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import os + + +def main(): + # required by external automated processes and should not be moved, renamed or converted to a symbolic link + original = 'examples/scripts/ConfigureRemotingForAnsible.ps1' + # required to be packaged with ansible-test and must match the original file, but cannot be a symbolic link + # the packaged version is needed to run tests when ansible-test has been installed + # keeping the packaged version identical to the original makes sure tests cover both files + packaged = 'test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1' + + copy_valid = False + + if os.path.isfile(original) and os.path.isfile(packaged): + with open(original, 'rb') as original_file: + original_content = original_file.read() + + with open(packaged, 'rb') as packaged_file: + packaged_content = packaged_file.read() + + if original_content == packaged_content: + copy_valid = True + + if not copy_valid: + print('%s: must be an exact copy of "%s"' % (packaged, original)) + + for path in [original, packaged]: + directory = path + + while True: + directory = os.path.dirname(directory) + + if not directory: + break + + if not os.path.isdir(directory): + print('%s: must be a directory' % directory) + + if os.path.islink(directory): + print('%s: cannot be a symbolic link' % directory) + + if not os.path.isfile(path): + print('%s: must be a file' % path) + + if os.path.islink(path): + print('%s: cannot be a symbolic link' % path) + + +if __name__ == '__main__': + main() diff --git 
a/test/sanity/code-smell/deprecated-config.json b/test/sanity/code-smell/deprecated-config.json new file mode 100644 index 0000000..4a88486 --- /dev/null +++ b/test/sanity/code-smell/deprecated-config.json @@ -0,0 +1,10 @@ +{ + "all_targets": true, + "output": "path-message", + "extensions": [ + ".py" + ], + "prefixes": [ + "lib/ansible/" + ] +} diff --git a/test/sanity/code-smell/deprecated-config.py b/test/sanity/code-smell/deprecated-config.py new file mode 100644 index 0000000..474628a --- /dev/null +++ b/test/sanity/code-smell/deprecated-config.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# (c) 2018, Matt Martz <matt@sivel.net> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +from __future__ import annotations + +import mmap +import os +import re +import sys + +from ansible.module_utils.compat.version import StrictVersion + +import yaml + +import ansible.config + +from ansible.plugins.loader import fragment_loader +from ansible.release import __version__ as ansible_version +from ansible.utils.plugin_docs import get_docstring + +DOC_RE = re.compile(b'^DOCUMENTATION', flags=re.M) +ANSIBLE_MAJOR = StrictVersion('.'.join(ansible_version.split('.')[:2])) + + +def find_deprecations(obj, path=None): + if not isinstance(obj, (list, dict)): + return + + try: + items = obj.items() + except AttributeError: + items = enumerate(obj) + + for key, value in items: + if path is None: + this_path = [] + else: + this_path = path[:] + + this_path.append(key) + + if key != 'deprecated': + for result in find_deprecations(value, path=this_path): + yield result + else: + try: + version = value['version'] + this_path.append('version') + except KeyError: + version = value['removed_in'] + this_path.append('removed_in') + if StrictVersion(version) <= ANSIBLE_MAJOR: + yield (this_path, version) + + +def main(): + plugins = [] + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'rb') as f: + try: + mm_file = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) + except ValueError: + continue + if DOC_RE.search(mm_file): + plugins.append(path) + mm_file.close() + + for plugin in plugins: + data = {} + data['doc'], data['examples'], data['return'], data['metadata'] = get_docstring(plugin, fragment_loader) + for result in find_deprecations(data['doc']): + print( + '%s: %s is scheduled for removal in %s' % (plugin, '.'.join(str(i) for i in result[0][:-2]), result[1]) + ) + + base = os.path.join(os.path.dirname(ansible.config.__file__), 'base.yml') + root_path = os.path.dirname(os.path.dirname(os.path.dirname(ansible.__file__))) + relative_base = os.path.relpath(base, root_path) + + with open(base) as f: + data = yaml.safe_load(f) + + for result in find_deprecations(data): + print('%s: %s is scheduled for removal in %s' % (relative_base, '.'.join(str(i) for i in result[0][:-2]), result[1])) + + +if __name__ == '__main__': + main() diff --git 
a/test/sanity/code-smell/deprecated-config.requirements.in b/test/sanity/code-smell/deprecated-config.requirements.in new file mode 100644 index 0000000..859c4ee --- /dev/null +++ b/test/sanity/code-smell/deprecated-config.requirements.in @@ -0,0 +1,2 @@ +jinja2 # ansible-core requirement +pyyaml diff --git a/test/sanity/code-smell/deprecated-config.requirements.txt b/test/sanity/code-smell/deprecated-config.requirements.txt new file mode 100644 index 0000000..6ab26e3 --- /dev/null +++ b/test/sanity/code-smell/deprecated-config.requirements.txt @@ -0,0 +1,4 @@ +# edit "deprecated-config.requirements.in" and generate with: hacking/update-sanity-requirements.py --test deprecated-config +Jinja2==3.1.2 +MarkupSafe==2.1.1 +PyYAML==6.0 diff --git a/test/sanity/code-smell/docs-build.json b/test/sanity/code-smell/docs-build.json new file mode 100644 index 0000000..a43fa92 --- /dev/null +++ b/test/sanity/code-smell/docs-build.json @@ -0,0 +1,5 @@ +{ + "disabled": true, + "no_targets": true, + "output": "path-line-column-message" +} diff --git a/test/sanity/code-smell/docs-build.py b/test/sanity/code-smell/docs-build.py new file mode 100644 index 0000000..aaa6937 --- /dev/null +++ b/test/sanity/code-smell/docs-build.py @@ -0,0 +1,152 @@ +from __future__ import annotations + +import os +import re +import shutil +import subprocess +import sys +import tempfile + + +def main(): + base_dir = os.getcwd() + os.path.sep + docs_dir = os.path.abspath('docs/docsite') + + # TODO: Remove this temporary hack to constrain 'cryptography' when we have + # a better story for dealing with it. + tmpfd, tmp = tempfile.mkstemp() + requirements_txt = os.path.join(base_dir, 'requirements.txt') + shutil.copy2(requirements_txt, tmp) + lines = [] + with open(requirements_txt, 'r') as f: + for line in f.readlines(): + if line.strip() == 'cryptography': + line = 'cryptography < 3.4\n' + lines.append(line) + + with open(requirements_txt, 'w') as f: + f.writelines(lines) + + try: + cmd = ['make', 'core_singlehtmldocs'] + sphinx = subprocess.run(cmd, stdin=subprocess.DEVNULL, capture_output=True, cwd=docs_dir, check=False, text=True) + finally: + shutil.move(tmp, requirements_txt) + + stdout = sphinx.stdout + stderr = sphinx.stderr + + if sphinx.returncode != 0: + sys.stderr.write("Command '%s' failed with status code: %d\n" % (' '.join(cmd), sphinx.returncode)) + + if stdout.strip(): + stdout = simplify_stdout(stdout) + + sys.stderr.write("--> Standard Output\n") + sys.stderr.write("%s\n" % stdout.strip()) + + if stderr.strip(): + sys.stderr.write("--> Standard Error\n") + sys.stderr.write("%s\n" % stderr.strip()) + + sys.exit(1) + + with open('docs/docsite/rst_warnings', 'r') as warnings_fd: + output = warnings_fd.read().strip() + lines = output.splitlines() + + known_warnings = { + 'block-quote-missing-blank-line': r'^Block quote ends without a blank line; unexpected unindent.$', + 'literal-block-lex-error': r'^Could not lex literal_block as "[^"]*". 
Highlighting skipped.$', + 'duplicate-label': r'^duplicate label ', + 'undefined-label': r'undefined label: ', + 'unknown-document': r'unknown document: ', + 'toc-tree-missing-document': r'toctree contains reference to nonexisting document ', + 'reference-target-not-found': r'[^ ]* reference target not found: ', + 'not-in-toc-tree': r"document isn't included in any toctree$", + 'unexpected-indentation': r'^Unexpected indentation.$', + 'definition-list-missing-blank-line': r'^Definition list ends without a blank line; unexpected unindent.$', + 'explicit-markup-missing-blank-line': r'Explicit markup ends without a blank line; unexpected unindent.$', + 'toc-tree-glob-pattern-no-match': r"^toctree glob pattern '[^']*' didn't match any documents$", + 'unknown-interpreted-text-role': '^Unknown interpreted text role "[^"]*".$', + } + + for line in lines: + match = re.search('^(?P<path>[^:]+):((?P<line>[0-9]+):)?((?P<column>[0-9]+):)? (?P<level>WARNING|ERROR): (?P<message>.*)$', line) + + if not match: + path = 'docs/docsite/rst/index.rst' + lineno = 0 + column = 0 + code = 'unknown' + message = line + + # surface unknown lines while filtering out known lines to avoid excessive output + print('%s:%d:%d: %s: %s' % (path, lineno, column, code, message)) + continue + + path = match.group('path') + lineno = int(match.group('line') or 0) + column = int(match.group('column') or 0) + level = match.group('level').lower() + message = match.group('message') + + path = os.path.abspath(path) + + if path.startswith(base_dir): + path = path[len(base_dir):] + + if path.startswith('rst/'): + path = 'docs/docsite/' + path # fix up paths reported relative to `docs/docsite/` + + if level == 'warning': + code = 'warning' + + for label, pattern in known_warnings.items(): + if re.search(pattern, message): + code = label + break + else: + code = 'error' + + print('%s:%d:%d: %s: %s' % (path, lineno, column, code, message)) + + +def simplify_stdout(value): + """Simplify output by omitting earlier 'rendering: ...' 
messages.""" + lines = value.strip().splitlines() + + rendering = [] + keep = [] + + def truncate_rendering(): + """Keep last rendering line (if any) with a message about omitted lines as needed.""" + if not rendering: + return + + notice = rendering[-1] + + if len(rendering) > 1: + notice += ' (%d previous rendering line(s) omitted)' % (len(rendering) - 1) + + keep.append(notice) + # Could change to rendering.clear() if we do not support python2 + rendering[:] = [] + + for line in lines: + if line.startswith('rendering: '): + rendering.append(line) + continue + + truncate_rendering() + keep.append(line) + + truncate_rendering() + + result = '\n'.join(keep) + + return result + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/docs-build.requirements.in b/test/sanity/code-smell/docs-build.requirements.in new file mode 100644 index 0000000..02c3bfc --- /dev/null +++ b/test/sanity/code-smell/docs-build.requirements.in @@ -0,0 +1,9 @@ +jinja2 +pyyaml +resolvelib < 0.9.0 +sphinx == 4.2.0 +sphinx-notfound-page +sphinx-ansible-theme +straight.plugin +rstcheck < 4 # match version used in other sanity tests +antsibull-docs == 1.7.0 # currently approved version diff --git a/test/sanity/code-smell/docs-build.requirements.txt b/test/sanity/code-smell/docs-build.requirements.txt new file mode 100644 index 0000000..7e30a73 --- /dev/null +++ b/test/sanity/code-smell/docs-build.requirements.txt @@ -0,0 +1,50 @@ +# edit "docs-build.requirements.in" and generate with: hacking/update-sanity-requirements.py --test docs-build +aiofiles==22.1.0 +aiohttp==3.8.3 +aiosignal==1.2.0 +alabaster==0.7.12 +ansible-pygments==0.1.1 +antsibull-core==1.2.0 +antsibull-docs==1.7.0 +async-timeout==4.0.2 +asyncio-pool==0.6.0 +attrs==22.1.0 +Babel==2.10.3 +certifi==2022.9.14 +charset-normalizer==2.1.1 +docutils==0.17.1 +frozenlist==1.3.1 +idna==3.4 +imagesize==1.4.1 +Jinja2==3.1.2 +MarkupSafe==2.1.1 +multidict==6.0.2 +packaging==21.3 +perky==0.5.5 +pydantic==1.10.2 +Pygments==2.13.0 +pyparsing==3.0.9 +pytz==2022.2.1 +PyYAML==6.0 +requests==2.28.1 +resolvelib==0.8.1 +rstcheck==3.5.0 +semantic-version==2.10.0 +sh==1.14.3 +six==1.16.0 +snowballstemmer==2.2.0 +Sphinx==4.2.0 +sphinx-ansible-theme==0.9.1 +sphinx-notfound-page==0.8.3 +sphinx-rtd-theme==1.0.0 +sphinxcontrib-applehelp==1.0.2 +sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-htmlhelp==2.0.0 +sphinxcontrib-jsmath==1.0.1 +sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-serializinghtml==1.1.5 +straight.plugin==1.5.0 +Twiggy==0.5.1 +typing_extensions==4.3.0 +urllib3==1.26.12 +yarl==1.8.1 diff --git a/test/sanity/code-smell/no-unwanted-files.json b/test/sanity/code-smell/no-unwanted-files.json new file mode 100644 index 0000000..7a89ebb --- /dev/null +++ b/test/sanity/code-smell/no-unwanted-files.json @@ -0,0 +1,7 @@ +{ + "include_symlinks": true, + "prefixes": [ + "lib/" + ], + "output": "path-message" +} diff --git a/test/sanity/code-smell/no-unwanted-files.py b/test/sanity/code-smell/no-unwanted-files.py new file mode 100644 index 0000000..7e13f53 --- /dev/null +++ b/test/sanity/code-smell/no-unwanted-files.py @@ -0,0 +1,49 @@ +"""Prevent unwanted files from being added to the source tree.""" +from __future__ import annotations + +import os +import sys + + +def main(): + """Main entry point.""" + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + allowed_extensions = ( + '.cs', + '.ps1', + '.psm1', + '.py', + ) + + skip_paths = set([ + 'lib/ansible/config/ansible_builtin_runtime.yml', # not included in the sanity ignore file since it won't exist until 
after migration + ]) + + skip_directories = ( + 'lib/ansible/galaxy/data/', + ) + + allow_yaml = ('lib/ansible/plugins/test', 'lib/ansible/plugins/filter') + + for path in paths: + if path in skip_paths: + continue + + if any(path.startswith(skip_directory) for skip_directory in skip_directories): + continue + + if path.startswith('lib/') and not path.startswith('lib/ansible/'): + print('%s: all "lib" content must reside in the "lib/ansible" directory' % path) + continue + + ext = os.path.splitext(path)[1] + if ext in ('.yml', ) and any(path.startswith(yaml_directory) for yaml_directory in allow_yaml): + continue + + if ext not in allowed_extensions: + print('%s: extension must be one of: %s' % (path, ', '.join(allowed_extensions))) + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/obsolete-files.json b/test/sanity/code-smell/obsolete-files.json new file mode 100644 index 0000000..02d3920 --- /dev/null +++ b/test/sanity/code-smell/obsolete-files.json @@ -0,0 +1,17 @@ +{ + "include_symlinks": true, + "prefixes": [ + "test/runner/", + "test/sanity/ansible-doc/", + "test/sanity/compile/", + "test/sanity/import/", + "test/sanity/pep8/", + "test/sanity/pslint/", + "test/sanity/pylint/", + "test/sanity/rstcheck/", + "test/sanity/shellcheck/", + "test/sanity/validate-modules/", + "test/sanity/yamllint/" + ], + "output": "path-message" +} diff --git a/test/sanity/code-smell/obsolete-files.py b/test/sanity/code-smell/obsolete-files.py new file mode 100644 index 0000000..3c1a4a4 --- /dev/null +++ b/test/sanity/code-smell/obsolete-files.py @@ -0,0 +1,17 @@ +"""Prevent files from being added to directories that are now obsolete.""" +from __future__ import annotations + +import os +import sys + + +def main(): + """Main entry point.""" + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + for path in paths: + print('%s: directory "%s/" is obsolete and should not contain any files' % (path, os.path.dirname(path))) + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/package-data.json b/test/sanity/code-smell/package-data.json new file mode 100644 index 0000000..0aa70a3 --- /dev/null +++ b/test/sanity/code-smell/package-data.json @@ -0,0 +1,5 @@ +{ + "disabled": true, + "all_targets": true, + "output": "path-message" +} diff --git a/test/sanity/code-smell/package-data.py b/test/sanity/code-smell/package-data.py new file mode 100644 index 0000000..0c6e761 --- /dev/null +++ b/test/sanity/code-smell/package-data.py @@ -0,0 +1,405 @@ +from __future__ import annotations + +import contextlib +import fnmatch +import glob +import os +import re +import shutil +import subprocess +import sys +import tarfile +import tempfile + + +def assemble_files_to_ship(complete_file_list): + """ + This looks for all files which should be shipped in the sdist + """ + # All files which are in the repository except these: + ignore_patterns = ( + # Developer-only tools + '.azure-pipelines/*', + '.github/*', + '.github/*/*', + 'changelogs/fragments/*', + 'hacking/backport/*', + 'hacking/azp/*', + 'hacking/tests/*', + 'hacking/ticket_stubs/*', + 'test/sanity/code-smell/botmeta.*', + 'test/sanity/code-smell/release-names.*', + 'test/utils/*', + 'test/utils/*/*', + 'test/utils/*/*/*', + 'test/results/.tmp/*', + 'test/results/.tmp/*/*', + 'test/results/.tmp/*/*/*', + 'test/results/.tmp/*/*/*/*', + 'test/results/.tmp/*/*/*/*/*', + '.git*', + ) + ignore_files = frozenset(( + # Developer-only tools + 'changelogs/config.yaml', + 'hacking/README.md', + 'hacking/ansible-profile', + 
'hacking/cgroup_perf_recap_graph.py', + 'hacking/create_deprecated_issues.py', + 'hacking/deprecated_issue_template.md', + 'hacking/create_deprecation_bug_reports.py', + 'hacking/fix_test_syntax.py', + 'hacking/get_library.py', + 'hacking/metadata-tool.py', + 'hacking/report.py', + 'hacking/return_skeleton_generator.py', + 'hacking/test-module', + 'test/support/README.md', + 'test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py', + 'test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py', + '.cherry_picker.toml', + '.mailmap', + # Generated as part of a build step + 'docs/docsite/rst/conf.py', + 'docs/docsite/rst/index.rst', + 'docs/docsite/rst/dev_guide/index.rst', + # Possibly should be included + 'examples/scripts/uptime.py', + 'examples/scripts/my_test.py', + 'examples/scripts/my_test_info.py', + 'examples/scripts/my_test_facts.py', + 'examples/DOCUMENTATION.yml', + 'examples/play.yml', + 'examples/hosts.yaml', + 'examples/hosts.yml', + 'examples/inventory_script_schema.json', + 'examples/plugin_filters.yml', + 'hacking/env-setup', + 'hacking/env-setup.fish', + 'MANIFEST', + 'setup.cfg', + # docs for test files not included in sdist + 'docs/docsite/rst/dev_guide/testing/sanity/bin-symlinks.rst', + 'docs/docsite/rst/dev_guide/testing/sanity/botmeta.rst', + 'docs/docsite/rst/dev_guide/testing/sanity/integration-aliases.rst', + 'docs/docsite/rst/dev_guide/testing/sanity/release-names.rst', + )) + + # These files are generated and then intentionally added to the sdist + + # Manpages + ignore_script = ('ansible-connection', 'ansible-test') + manpages = ['docs/man/man1/ansible.1'] + for dirname, dummy, files in os.walk('bin'): + for filename in files: + if filename in ignore_script: + continue + manpages.append('docs/man/man1/%s.1' % filename) + + # Misc + misc_generated_files = [ + 'PKG-INFO', + ] + + shipped_files = manpages + misc_generated_files + + for path in complete_file_list: + if path not in ignore_files: + for ignore in ignore_patterns: + if fnmatch.fnmatch(path, ignore): + break + else: + shipped_files.append(path) + + return shipped_files + + +def assemble_files_to_install(complete_file_list): + """ + This looks for all of the files which should show up in an installation of ansible + """ + ignore_patterns = ( + # Tests excluded from sdist + 'test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py', + 'test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py', + ) + + pkg_data_files = [] + for path in complete_file_list: + + if path.startswith("lib/ansible"): + prefix = 'lib' + elif path.startswith("test/lib/ansible_test"): + prefix = 'test/lib' + else: + continue + + for ignore in ignore_patterns: + if fnmatch.fnmatch(path, ignore): + break + else: + pkg_data_files.append(os.path.relpath(path, prefix)) + + return pkg_data_files + + +@contextlib.contextmanager +def clean_repository(file_list): + """Copy the repository to clean it of artifacts""" + # Create a tempdir that will be the clean repo + with tempfile.TemporaryDirectory() as repo_root: + directories = set((repo_root + os.path.sep,)) + + for filename in file_list: + # Determine if we need to create the directory + directory = os.path.dirname(filename) + dest_dir = os.path.join(repo_root, directory) + if dest_dir not in directories: + os.makedirs(dest_dir) + + # Keep track of all the directories that now exist + path_components = directory.split(os.path.sep) + path = repo_root + for component in path_components: + path = os.path.join(path, component) + if path not in 
directories: + directories.add(path) + + # Copy the file + shutil.copy2(filename, dest_dir, follow_symlinks=False) + + yield repo_root + + +def create_sdist(tmp_dir): + """Create an sdist in the repository""" + create = subprocess.run( + ['make', 'snapshot', 'SDIST_DIR=%s' % tmp_dir], + stdin=subprocess.DEVNULL, + capture_output=True, + text=True, + check=False, + ) + + stderr = create.stderr + + if create.returncode != 0: + raise Exception('make snapshot failed:\n%s' % stderr) + + # Determine path to sdist + tmp_dir_files = os.listdir(tmp_dir) + + if not tmp_dir_files: + raise Exception('sdist was not created in the temp dir') + elif len(tmp_dir_files) > 1: + raise Exception('Unexpected extra files in the temp dir') + + return os.path.join(tmp_dir, tmp_dir_files[0]) + + +def extract_sdist(sdist_path, tmp_dir): + """Untar the sdist""" + # Untar the sdist from the tmp_dir + with tarfile.open(os.path.join(tmp_dir, sdist_path), 'r|*') as sdist: + sdist.extractall(path=tmp_dir) + + # Determine the sdist directory name + sdist_filename = os.path.basename(sdist_path) + tmp_dir_files = os.listdir(tmp_dir) + try: + tmp_dir_files.remove(sdist_filename) + except ValueError: + # Unexpected: could not find original sdist in temp dir + raise + + if len(tmp_dir_files) > 1: + raise Exception('Unexpected extra files in the temp dir') + elif len(tmp_dir_files) < 1: + raise Exception('sdist extraction did not occur in the temp dir') + + return os.path.join(tmp_dir, tmp_dir_files[0]) + + +def install_sdist(tmp_dir, sdist_dir): + """Install the extracted sdist into the temporary directory""" + install = subprocess.run( + ['python', 'setup.py', 'install', '--root=%s' % tmp_dir], + stdin=subprocess.DEVNULL, + capture_output=True, + text=True, + cwd=os.path.join(tmp_dir, sdist_dir), + check=False, + ) + + stdout, stderr = install.stdout, install.stderr + + if install.returncode != 0: + raise Exception('sdist install failed:\n%s' % stderr) + + # Determine the prefix for the installed files + match = re.search('^copying .* -> (%s/.*?/(?:site|dist)-packages)/ansible$' % + tmp_dir, stdout, flags=re.M) + + return match.group(1) + + +def check_sdist_contains_expected(sdist_dir, to_ship_files): + """Check that the files we expect to ship are present in the sdist""" + results = [] + for filename in to_ship_files: + path = os.path.join(sdist_dir, filename) + if not os.path.exists(path): + results.append('%s: File was not added to sdist' % filename) + + # Also changelog + changelog_files = glob.glob(os.path.join(sdist_dir, 'changelogs/CHANGELOG-v2.[0-9]*.rst')) + if not changelog_files: + results.append('changelogs/CHANGELOG-v2.*.rst: Changelog file was not added to the sdist') + elif len(changelog_files) > 1: + results.append('changelogs/CHANGELOG-v2.*.rst: Too many changelog files: %s' + % changelog_files) + + return results + + +def check_sdist_files_are_wanted(sdist_dir, to_ship_files): + """Check that all files in the sdist are desired""" + results = [] + for dirname, dummy, files in os.walk(sdist_dir): + dirname = os.path.relpath(dirname, start=sdist_dir) + if dirname == '.': + dirname = '' + + for filename in files: + if filename == 'setup.cfg': + continue + + path = os.path.join(dirname, filename) + if path not in to_ship_files: + + if fnmatch.fnmatch(path, 'changelogs/CHANGELOG-v2.[0-9]*.rst'): + # changelog files are expected + continue + + if fnmatch.fnmatch(path, 'lib/ansible_core.egg-info/*'): + continue + + # FIXME: ansible-test doesn't pass the paths of symlinks to us so we aren't + # checking those + if not
os.path.islink(os.path.join(sdist_dir, path)): + results.append('%s: File in sdist was not in the repository' % path) + + return results + + +def check_installed_contains_expected(install_dir, to_install_files): + """Check that all the files we expect to be installed are""" + results = [] + for filename in to_install_files: + path = os.path.join(install_dir, filename) + if not os.path.exists(path): + results.append('%s: File not installed' % os.path.join('lib', filename)) + + return results + + +EGG_RE = re.compile('ansible[^/]+\\.egg-info/(PKG-INFO|SOURCES.txt|' + 'dependency_links.txt|not-zip-safe|requires.txt|top_level.txt|entry_points.txt)$') + + +def check_installed_files_are_wanted(install_dir, to_install_files): + """Check that all installed files were desired""" + results = [] + + for dirname, dummy, files in os.walk(install_dir): + dirname = os.path.relpath(dirname, start=install_dir) + if dirname == '.': + dirname = '' + + for filename in files: + # If this is a byte code cache, look for the python file's name + directory = dirname + if filename.endswith('.pyc') or filename.endswith('.pyo'): + # Remove the trailing "o" or c" + filename = filename[:-1] + + if directory.endswith('%s__pycache__' % os.path.sep): + # Python3 byte code cache, look for the basename of + # __pycache__/__init__.cpython-36.py + segments = filename.rsplit('.', 2) + if len(segments) >= 3: + filename = '.'.join((segments[0], segments[2])) + directory = os.path.dirname(directory) + + path = os.path.join(directory, filename) + + # Test that the file was listed for installation + if path not in to_install_files: + # FIXME: ansible-test doesn't pass the paths of symlinks to us so we + # aren't checking those + if not os.path.islink(os.path.join(install_dir, path)): + if not EGG_RE.match(path): + results.append('%s: File was installed but was not supposed to be' % path) + + return results + + +def _find_symlinks(): + symlink_list = [] + for dirname, directories, filenames in os.walk('.'): + for filename in filenames: + path = os.path.join(dirname, filename) + # Strip off "./" from the front + path = path[2:] + if os.path.islink(path): + symlink_list.append(path) + + return symlink_list + + +def main(): + """All of the files in the repository""" + complete_file_list = [] + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + complete_file_list.append(path) + + # ansible-test isn't currently passing symlinks to us so construct those ourselves for now + for filename in _find_symlinks(): + if filename not in complete_file_list: + # For some reason ansible-test is passing us lib/ansible/module_utils/ansible_release.py + # which is a symlink even though it doesn't pass any others + complete_file_list.append(filename) + + # We may run this after docs sanity tests so get a clean repository to run in + with clean_repository(complete_file_list) as clean_repo_dir: + os.chdir(clean_repo_dir) + + to_ship_files = assemble_files_to_ship(complete_file_list) + to_install_files = assemble_files_to_install(complete_file_list) + + results = [] + with tempfile.TemporaryDirectory() as tmp_dir: + sdist_path = create_sdist(tmp_dir) + sdist_dir = extract_sdist(sdist_path, tmp_dir) + + # Check that the files that are supposed to be in the sdist are there + results.extend(check_sdist_contains_expected(sdist_dir, to_ship_files)) + + # Check that the files that are in the sdist are in the repository + results.extend(check_sdist_files_are_wanted(sdist_dir, to_ship_files)) + + # install the sdist + install_dir = install_sdist(tmp_dir, 
sdist_dir) + + # Check that the files that are supposed to be installed are there + results.extend(check_installed_contains_expected(install_dir, to_install_files)) + + # Check that the files that are installed are supposed to be installed + results.extend(check_installed_files_are_wanted(install_dir, to_install_files)) + + for message in results: + print(message) + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/package-data.requirements.in b/test/sanity/code-smell/package-data.requirements.in new file mode 100644 index 0000000..6b58f75 --- /dev/null +++ b/test/sanity/code-smell/package-data.requirements.in @@ -0,0 +1,7 @@ +docutils < 0.18 # match version required by sphinx in the docs-build sanity test +jinja2 +pyyaml # ansible-core requirement +resolvelib < 0.9.0 +rstcheck < 4 # match version used in other sanity tests +straight.plugin +antsibull-changelog diff --git a/test/sanity/code-smell/package-data.requirements.txt b/test/sanity/code-smell/package-data.requirements.txt new file mode 100644 index 0000000..94ad68f --- /dev/null +++ b/test/sanity/code-smell/package-data.requirements.txt @@ -0,0 +1,12 @@ +# edit "package-data.requirements.in" and generate with: hacking/update-sanity-requirements.py --test package-data +antsibull-changelog==0.16.0 +docutils==0.17.1 +Jinja2==3.1.2 +MarkupSafe==2.1.1 +packaging==21.3 +pyparsing==3.0.9 +PyYAML==6.0 +resolvelib==0.8.1 +rstcheck==3.5.0 +semantic-version==2.10.0 +straight.plugin==1.5.0 diff --git a/test/sanity/code-smell/required-and-default-attributes.json b/test/sanity/code-smell/required-and-default-attributes.json new file mode 100644 index 0000000..dd9ac7b --- /dev/null +++ b/test/sanity/code-smell/required-and-default-attributes.json @@ -0,0 +1,9 @@ +{ + "prefixes": [ + "lib/ansible/" + ], + "extensions": [ + ".py" + ], + "output": "path-line-column-message" +} diff --git a/test/sanity/code-smell/required-and-default-attributes.py b/test/sanity/code-smell/required-and-default-attributes.py new file mode 100644 index 0000000..900829d --- /dev/null +++ b/test/sanity/code-smell/required-and-default-attributes.py @@ -0,0 +1,19 @@ +from __future__ import annotations + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'(FieldAttribute.*(default|required).*(default|required))', text) + + if match: + print('%s:%d:%d: use only one of `default` or `required` with `FieldAttribute`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/rstcheck.json b/test/sanity/code-smell/rstcheck.json new file mode 100644 index 0000000..870c19f --- /dev/null +++ b/test/sanity/code-smell/rstcheck.json @@ -0,0 +1,6 @@ +{ + "output": "path-line-column-message", + "extensions": [ + ".rst" + ] +} diff --git a/test/sanity/code-smell/rstcheck.py b/test/sanity/code-smell/rstcheck.py new file mode 100644 index 0000000..99917ca --- /dev/null +++ b/test/sanity/code-smell/rstcheck.py @@ -0,0 +1,62 @@ +"""Sanity test using rstcheck and sphinx.""" +from __future__ import annotations + +import re +import subprocess +import sys + + +def main(): + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + encoding = 'utf-8' + + ignore_substitutions = ( + 'br', + ) + + cmd = [ + sys.executable, + '-m', 'rstcheck', + '--report', 'warning', + '--ignore-substitutions', ','.join(ignore_substitutions), + ] + paths + + process = 
subprocess.run(cmd, + stdin=subprocess.DEVNULL, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + ) + + if process.stdout: + raise Exception(process.stdout) + + pattern = re.compile(r'^(?P<path>[^:]*):(?P<line>[0-9]+): \((?P<level>INFO|WARNING|ERROR|SEVERE)/[0-4]\) (?P<message>.*)$') + + results = parse_to_list_of_dict(pattern, process.stderr.decode(encoding)) + + for result in results: + print('%s:%s:%s: %s' % (result['path'], result['line'], 0, result['message'])) + + +def parse_to_list_of_dict(pattern, value): + matched = [] + unmatched = [] + + for line in value.splitlines(): + match = re.search(pattern, line) + + if match: + matched.append(match.groupdict()) + else: + unmatched.append(line) + + if unmatched: + raise Exception('Pattern "%s" did not match values:\n%s' % (pattern, '\n'.join(unmatched))) + + return matched + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/rstcheck.requirements.in b/test/sanity/code-smell/rstcheck.requirements.in new file mode 100644 index 0000000..5b93841 --- /dev/null +++ b/test/sanity/code-smell/rstcheck.requirements.in @@ -0,0 +1,3 @@ +sphinx == 4.2.0 # required for full rstcheck functionality, installed first to get the correct docutils version +rstcheck < 4 # match version used in other sanity tests +jinja2 # ansible-core requirement diff --git a/test/sanity/code-smell/rstcheck.requirements.txt b/test/sanity/code-smell/rstcheck.requirements.txt new file mode 100644 index 0000000..81d5c4f --- /dev/null +++ b/test/sanity/code-smell/rstcheck.requirements.txt @@ -0,0 +1,25 @@ +# edit "rstcheck.requirements.in" and generate with: hacking/update-sanity-requirements.py --test rstcheck +alabaster==0.7.12 +Babel==2.10.3 +certifi==2022.9.14 +charset-normalizer==2.1.1 +docutils==0.17.1 +idna==3.4 +imagesize==1.4.1 +Jinja2==3.1.2 +MarkupSafe==2.1.1 +packaging==21.3 +Pygments==2.13.0 +pyparsing==3.0.9 +pytz==2022.2.1 +requests==2.28.1 +rstcheck==3.5.0 +snowballstemmer==2.2.0 +Sphinx==4.2.0 +sphinxcontrib-applehelp==1.0.2 +sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-htmlhelp==2.0.0 +sphinxcontrib-jsmath==1.0.1 +sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-serializinghtml==1.1.5 +urllib3==1.26.12 diff --git a/test/sanity/code-smell/skip.txt b/test/sanity/code-smell/skip.txt new file mode 100644 index 0000000..6fb327b --- /dev/null +++ b/test/sanity/code-smell/skip.txt @@ -0,0 +1,2 @@ +deprecated-config.py # disabled by default, to be enabled by the release manager after branching +update-bundled.py # disabled by default, to be enabled by the release manager after branching diff --git a/test/sanity/code-smell/test-constraints.json b/test/sanity/code-smell/test-constraints.json new file mode 100644 index 0000000..8f47beb --- /dev/null +++ b/test/sanity/code-smell/test-constraints.json @@ -0,0 +1,11 @@ +{ + "all_targets": true, + "prefixes": [ + "test/lib/ansible_test/_data/requirements/", + "test/sanity/code-smell/" + ], + "extensions": [ + ".txt" + ], + "output": "path-line-column-message" +} diff --git a/test/sanity/code-smell/test-constraints.py b/test/sanity/code-smell/test-constraints.py new file mode 100644 index 0000000..df30fe1 --- /dev/null +++ b/test/sanity/code-smell/test-constraints.py @@ -0,0 +1,126 @@ +from __future__ import annotations + +import os +import pathlib +import re +import sys + + +def main(): + constraints_path = 'test/lib/ansible_test/_data/requirements/constraints.txt' + + requirements = {} + + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + if path == 
'test/lib/ansible_test/_data/requirements/ansible.txt': + # This file is an exact copy of the ansible requirements.txt and should not conflict with other constraints. + continue + + with open(path, 'r') as path_fd: + requirements[path] = parse_requirements(path_fd.read().splitlines()) + + if path == 'test/lib/ansible_test/_data/requirements/ansible-test.txt': + # Special handling is required for ansible-test's requirements file. + check_ansible_test(path, requirements.pop(path)) + continue + + frozen_sanity = {} + non_sanity_requirements = set() + + for path, requirements in requirements.items(): + filename = os.path.basename(path) + + is_sanity = filename.startswith('sanity.') or filename.endswith('.requirements.txt') + is_constraints = path == constraints_path + + for lineno, line, requirement in requirements: + if not requirement: + print('%s:%d:%d: cannot parse requirement: %s' % (path, lineno, 1, line)) + continue + + name = requirement.group('name').lower() + raw_constraints = requirement.group('constraints') + constraints = raw_constraints.strip() + comment = requirement.group('comment') + + is_pinned = re.search('^ *== *[0-9.]+(\\.post[0-9]+)?$', constraints) + + if is_sanity: + sanity = frozen_sanity.setdefault(name, []) + sanity.append((path, lineno, line, requirement)) + elif not is_constraints: + non_sanity_requirements.add(name) + + if is_sanity: + if not is_pinned: + # sanity test requirements must be pinned + print('%s:%d:%d: sanity test requirement (%s%s) must be frozen (use `==`)' % (path, lineno, 1, name, raw_constraints)) + + continue + + if constraints and not is_constraints: + allow_constraints = 'sanity_ok' in comment + + if not allow_constraints: + # keeping constraints for tests other than sanity tests in one file helps avoid conflicts + print('%s:%d:%d: put the constraint (%s%s) in `%s`' % (path, lineno, 1, name, raw_constraints, constraints_path)) + + for name, requirements in frozen_sanity.items(): + if len(set(req[3].group('constraints').strip() for req in requirements)) != 1: + for req in requirements: + print('%s:%d:%d: sanity constraint (%s) does not match others for package `%s`' % ( + req[0], req[1], req[3].start('constraints') + 1, req[3].group('constraints'), name)) + + +def check_ansible_test(path: str, requirements: list[tuple[int, str, re.Match]]) -> None: + sys.path.insert(0, str(pathlib.Path(__file__).parent.parent.parent.joinpath('lib'))) + + from ansible_test._internal.python_requirements import VIRTUALENV_VERSION + from ansible_test._internal.coverage_util import COVERAGE_VERSIONS + from ansible_test._internal.util import version_to_str + + expected_lines = set([ + f"virtualenv == {VIRTUALENV_VERSION} ; python_version < '3'", + ] + [ + f"coverage == {item.coverage_version} ; python_version >= '{version_to_str(item.min_python)}' and python_version <= '{version_to_str(item.max_python)}'" + for item in COVERAGE_VERSIONS + ]) + + for idx, requirement in enumerate(requirements): + lineno, line, match = requirement + + if line in expected_lines: + expected_lines.remove(line) + continue + + print('%s:%d:%d: unexpected line: %s' % (path, lineno, 1, line)) + + for expected_line in sorted(expected_lines): + print('%s:%d:%d: missing line: %s' % (path, requirements[-1][0] + 1, 1, expected_line)) + + +def parse_requirements(lines): + # see https://www.python.org/dev/peps/pep-0508/#names + pattern = re.compile(r'^(?P<name>[A-Z0-9][A-Z0-9._-]*[A-Z0-9]|[A-Z0-9])(?P<extras> *\[[^]]*])?(?P<constraints>[^;#]*)(?P<markers>[^#]*)(?P<comment>.*)$', + re.IGNORECASE) + + 
matches = [(lineno, line, pattern.search(line)) for lineno, line in enumerate(lines, start=1)] + requirements = [] + + for lineno, line, match in matches: + if not line.strip(): + continue + + if line.strip().startswith('#'): + continue + + if line.startswith('git+https://'): + continue # hack to ignore git requirements + + requirements.append((lineno, line, match)) + + return requirements + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/update-bundled.json b/test/sanity/code-smell/update-bundled.json new file mode 100644 index 0000000..379bf4d --- /dev/null +++ b/test/sanity/code-smell/update-bundled.json @@ -0,0 +1,8 @@ +{ + "all_targets": true, + "ignore_self": true, + "extensions": [ + ".py" + ], + "output": "path-message" +} diff --git a/test/sanity/code-smell/update-bundled.py b/test/sanity/code-smell/update-bundled.py new file mode 100644 index 0000000..4bad77a --- /dev/null +++ b/test/sanity/code-smell/update-bundled.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +# (c) 2018, Ansible Project +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. +""" +This test checks whether the libraries we're bundling are out of date and need to be synced with +a newer upstream release. +""" + + +from __future__ import annotations + +import fnmatch +import json +import re +import sys +from ansible.module_utils.compat.version import LooseVersion + +import packaging.specifiers + +from ansible.module_utils.urls import open_url + + +BUNDLED_RE = re.compile(b'\\b_BUNDLED_METADATA\\b') + + +def get_bundled_libs(paths): + """ + Return the set of known bundled libraries + + :arg paths: The paths which the test has been instructed to check + :returns: The list of all files which we know to contain bundled libraries. If a bundled + library consists of multiple files, this should be the file which has metadata included. 
+ """ + bundled_libs = set() + for filename in fnmatch.filter(paths, 'lib/ansible/compat/*/__init__.py'): + bundled_libs.add(filename) + + bundled_libs.add('lib/ansible/module_utils/compat/selectors.py') + bundled_libs.add('lib/ansible/module_utils/distro/__init__.py') + bundled_libs.add('lib/ansible/module_utils/six/__init__.py') + # backports.ssl_match_hostname should be moved to its own file in the future + bundled_libs.add('lib/ansible/module_utils/urls.py') + + return bundled_libs + + +def get_files_with_bundled_metadata(paths): + """ + Search for any files which have bundled metadata inside of them + + :arg paths: Iterable of filenames to search for metadata inside of + :returns: A set of pathnames which contained metadata + """ + + with_metadata = set() + for path in paths: + with open(path, 'rb') as f: + body = f.read() + + if BUNDLED_RE.search(body): + with_metadata.add(path) + + return with_metadata + + +def get_bundled_metadata(filename): + """ + Retrieve the metadata about a bundled library from a python file + + :arg filename: The filename to look inside for the metadata + :raises ValueError: If we're unable to extract metadata from the file + :returns: The metadata from the python file + """ + with open(filename, 'r') as module: + for line in module: + if line.strip().startswith('# NOT_BUNDLED'): + return None + + if line.strip().startswith('# CANT_UPDATE'): + print( + '{0} marked as CANT_UPDATE, so skipping. Manual ' + 'check for CVEs required.'.format(filename)) + return None + + if line.strip().startswith('_BUNDLED_METADATA'): + data = line[line.index('{'):].strip() + break + else: + raise ValueError('Unable to check bundled library for update. Please add' + ' _BUNDLED_METADATA dictionary to the library file with' + ' information on pypi name and bundled version.') + metadata = json.loads(data) + return metadata + + +def get_latest_applicable_version(pypi_data, constraints=None): + """Get the latest pypi version of the package that we allow + + :arg pypi_data: Pypi information about the data as returned by + ``https://pypi.org/pypi/{pkg_name}/json`` + :kwarg constraints: version constraints on what we're allowed to use as specified by + the bundled metadata + :returns: The most recent version on pypi that are allowed by ``constraints`` + """ + latest_version = "0" + if constraints: + version_specification = packaging.specifiers.SpecifierSet(constraints) + for version in pypi_data['releases']: + if version in version_specification: + if LooseVersion(version) > LooseVersion(latest_version): + latest_version = version + else: + latest_version = pypi_data['info']['version'] + + return latest_version + + +def main(): + """Entrypoint to the script""" + + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + bundled_libs = get_bundled_libs(paths) + files_with_bundled_metadata = get_files_with_bundled_metadata(paths) + + for filename in files_with_bundled_metadata.difference(bundled_libs): + if filename.startswith('test/support/'): + continue # bundled support code does not need to be updated or tracked + + print('{0}: ERROR: File contains _BUNDLED_METADATA but needs to be added to' + ' test/sanity/code-smell/update-bundled.py'.format(filename)) + + for filename in bundled_libs: + try: + metadata = get_bundled_metadata(filename) + except ValueError as e: + print('{0}: ERROR: {1}'.format(filename, e)) + continue + except (IOError, OSError) as e: + if e.errno == 2: + print('{0}: ERROR: {1}. 
Perhaps the bundled library has been removed' + ' or moved and the bundled library test needs to be modified as' + ' well?'.format(filename, e)) + + if metadata is None: + continue + + pypi_fh = open_url('https://pypi.org/pypi/{0}/json'.format(metadata['pypi_name'])) + pypi_data = json.loads(pypi_fh.read().decode('utf-8')) + + constraints = metadata.get('version_constraints', None) + latest_version = get_latest_applicable_version(pypi_data, constraints) + + if LooseVersion(metadata['version']) < LooseVersion(latest_version): + print('{0}: UPDATE {1} from {2} to {3} {4}'.format( + filename, + metadata['pypi_name'], + metadata['version'], + latest_version, + 'https://pypi.org/pypi/{0}/json'.format(metadata['pypi_name']))) + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/update-bundled.requirements.in b/test/sanity/code-smell/update-bundled.requirements.in new file mode 100644 index 0000000..748809f --- /dev/null +++ b/test/sanity/code-smell/update-bundled.requirements.in @@ -0,0 +1 @@ +packaging diff --git a/test/sanity/code-smell/update-bundled.requirements.txt b/test/sanity/code-smell/update-bundled.requirements.txt new file mode 100644 index 0000000..d9785e7 --- /dev/null +++ b/test/sanity/code-smell/update-bundled.requirements.txt @@ -0,0 +1,3 @@ +# edit "update-bundled.requirements.in" and generate with: hacking/update-sanity-requirements.py --test update-bundled +packaging==21.3 +pyparsing==3.0.9 diff --git a/test/sanity/ignore.txt b/test/sanity/ignore.txt new file mode 100644 index 0000000..660628f --- /dev/null +++ b/test/sanity/ignore.txt @@ -0,0 +1,232 @@ +.azure-pipelines/scripts/publish-codecov.py replace-urlopen +docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst no-smart-quotes +docs/docsite/rst/locales/ja/LC_MESSAGES/dev_guide.po no-smart-quotes # Translation of the no-smart-quotes rule +examples/scripts/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath +examples/scripts/upgrade_to_ps3.ps1 pslint:PSCustomUseLiteralPath +examples/scripts/upgrade_to_ps3.ps1 pslint:PSUseApprovedVerbs +lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang +lib/ansible/config/base.yml no-unwanted-files +lib/ansible/executor/playbook_executor.py pylint:disallowed-name +lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath +lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath +lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath +lib/ansible/executor/task_queue_manager.py pylint:disallowed-name +lib/ansible/keyword_desc.yml no-unwanted-files +lib/ansible/modules/apt.py validate-modules:parameter-invalid +lib/ansible/modules/apt_repository.py validate-modules:parameter-invalid +lib/ansible/modules/assemble.py validate-modules:nonexistent-parameter-documented +lib/ansible/modules/async_status.py use-argspec-type-path +lib/ansible/modules/async_status.py validate-modules!skip +lib/ansible/modules/async_wrapper.py ansible-doc!skip # not an actual module +lib/ansible/modules/async_wrapper.py pylint:ansible-bad-function # ignore, required +lib/ansible/modules/async_wrapper.py use-argspec-type-path +lib/ansible/modules/blockinfile.py validate-modules:doc-choices-do-not-match-spec +lib/ansible/modules/blockinfile.py validate-modules:doc-default-does-not-match-spec +lib/ansible/modules/command.py validate-modules:doc-default-does-not-match-spec # _uses_shell is undocumented +lib/ansible/modules/command.py validate-modules:doc-missing-type +lib/ansible/modules/command.py 
validate-modules:nonexistent-parameter-documented
+lib/ansible/modules/command.py validate-modules:undocumented-parameter
+lib/ansible/modules/copy.py pylint:disallowed-name
+lib/ansible/modules/copy.py validate-modules:doc-default-does-not-match-spec
+lib/ansible/modules/copy.py validate-modules:nonexistent-parameter-documented
+lib/ansible/modules/copy.py validate-modules:undocumented-parameter
+lib/ansible/modules/dnf.py validate-modules:doc-required-mismatch
+lib/ansible/modules/dnf.py validate-modules:parameter-invalid
+lib/ansible/modules/file.py validate-modules:undocumented-parameter
+lib/ansible/modules/find.py use-argspec-type-path # fix needed
+lib/ansible/modules/git.py pylint:disallowed-name
+lib/ansible/modules/git.py use-argspec-type-path
+lib/ansible/modules/git.py validate-modules:doc-missing-type
+lib/ansible/modules/git.py validate-modules:doc-required-mismatch
+lib/ansible/modules/iptables.py pylint:disallowed-name
+lib/ansible/modules/lineinfile.py validate-modules:doc-choices-do-not-match-spec
+lib/ansible/modules/lineinfile.py validate-modules:doc-default-does-not-match-spec
+lib/ansible/modules/lineinfile.py validate-modules:nonexistent-parameter-documented
+lib/ansible/modules/package_facts.py validate-modules:doc-choices-do-not-match-spec
+lib/ansible/modules/pip.py pylint:disallowed-name
+lib/ansible/modules/replace.py validate-modules:nonexistent-parameter-documented
+lib/ansible/modules/service.py validate-modules:nonexistent-parameter-documented
+lib/ansible/modules/service.py validate-modules:use-run-command-not-popen
+lib/ansible/modules/stat.py validate-modules:doc-default-does-not-match-spec # get_md5 is undocumented
+lib/ansible/modules/stat.py validate-modules:parameter-invalid
+lib/ansible/modules/stat.py validate-modules:parameter-type-not-in-doc
+lib/ansible/modules/stat.py validate-modules:undocumented-parameter
+lib/ansible/modules/systemd_service.py validate-modules:parameter-invalid
+lib/ansible/modules/systemd_service.py validate-modules:return-syntax-error
+lib/ansible/modules/sysvinit.py validate-modules:return-syntax-error
+lib/ansible/modules/uri.py validate-modules:doc-required-mismatch
+lib/ansible/modules/user.py validate-modules:doc-default-does-not-match-spec
+lib/ansible/modules/user.py validate-modules:use-run-command-not-popen
+lib/ansible/modules/yum.py pylint:disallowed-name
+lib/ansible/modules/yum.py validate-modules:parameter-invalid
+lib/ansible/modules/yum_repository.py validate-modules:doc-default-does-not-match-spec
+lib/ansible/modules/yum_repository.py validate-modules:parameter-type-not-in-doc
+lib/ansible/modules/yum_repository.py validate-modules:undocumented-parameter
+lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
+lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
+lib/ansible/module_utils/compat/_selectors2.py pylint:disallowed-name
+lib/ansible/module_utils/compat/selinux.py import-2.7!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/compat/selinux.py import-3.5!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
+lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
+lib/ansible/module_utils/distro/_distro.py no-assert
+lib/ansible/module_utils/distro/_distro.py pylint:using-constant-test # bundled code we don't want to modify
+lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
+lib/ansible/module_utils/distro/__init__.py empty-init # breaks namespacing, bundled, do not override
+lib/ansible/module_utils/facts/__init__.py empty-init # breaks namespacing, deprecate and eventually remove
+lib/ansible/module_utils/facts/network/linux.py pylint:disallowed-name
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.ArgvParser.psm1 pslint:PSUseApprovedVerbs
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSProvideCommentHelp # need to agree on best format for comment location
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSUseApprovedVerbs
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSCustomUseLiteralPath
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSProvideCommentHelp
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSCustomUseLiteralPath
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSUseApprovedVerbs
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.LinkUtil.psm1 pslint:PSUseApprovedVerbs
+lib/ansible/module_utils/pycompat24.py no-get-exception
+lib/ansible/module_utils/six/__init__.py empty-init # breaks namespacing, bundled, do not override
+lib/ansible/module_utils/six/__init__.py future-import-boilerplate # ignore bundled
+lib/ansible/module_utils/six/__init__.py metaclass-boilerplate # ignore bundled
+lib/ansible/module_utils/six/__init__.py no-basestring
+lib/ansible/module_utils/six/__init__.py no-dict-iteritems
+lib/ansible/module_utils/six/__init__.py no-dict-iterkeys
+lib/ansible/module_utils/six/__init__.py no-dict-itervalues
+lib/ansible/module_utils/six/__init__.py pylint:self-assigning-variable
+lib/ansible/module_utils/six/__init__.py replace-urlopen
+lib/ansible/module_utils/urls.py pylint:arguments-renamed
+lib/ansible/module_utils/urls.py pylint:disallowed-name
+lib/ansible/module_utils/urls.py replace-urlopen
+lib/ansible/parsing/vault/__init__.py pylint:disallowed-name
+lib/ansible/parsing/yaml/objects.py pylint:arguments-renamed
+lib/ansible/playbook/base.py pylint:disallowed-name
+lib/ansible/playbook/collectionsearch.py required-and-default-attributes # https://github.com/ansible/ansible/issues/61460
+lib/ansible/playbook/helpers.py pylint:disallowed-name
+lib/ansible/playbook/playbook_include.py pylint:arguments-renamed
+lib/ansible/playbook/role/include.py pylint:arguments-renamed
+lib/ansible/plugins/action/normal.py action-plugin-docs # default action plugin for modules without a dedicated action plugin
+lib/ansible/plugins/cache/base.py ansible-doc!skip # not a plugin, but a stub for backwards compatibility
+lib/ansible/plugins/callback/__init__.py pylint:arguments-renamed
+lib/ansible/plugins/inventory/advanced_host_list.py pylint:arguments-renamed
+lib/ansible/plugins/inventory/host_list.py pylint:arguments-renamed
+lib/ansible/plugins/lookup/random_choice.py pylint:arguments-renamed
+lib/ansible/plugins/lookup/sequence.py pylint:disallowed-name
+lib/ansible/plugins/shell/cmd.py pylint:arguments-renamed
+lib/ansible/plugins/strategy/__init__.py pylint:disallowed-name
+lib/ansible/plugins/strategy/linear.py pylint:disallowed-name
+lib/ansible/utils/collection_loader/_collection_finder.py pylint:deprecated-class
+lib/ansible/utils/collection_loader/_collection_meta.py pylint:deprecated-class
+lib/ansible/vars/hostvars.py pylint:disallowed-name
+test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-function # ignore, required for testing
+test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import-from # ignore, required for testing
+test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import # ignore, required for testing
+test/integration/targets/ansible-test-integration/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
+test/integration/targets/ansible-test-units/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
+test/integration/targets/ansible-test-units/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
+test/integration/targets/ansible-test-units/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
+test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
+test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
+test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
+test/integration/targets/ansible-test-no-tty/ansible_collections/ns/col/vendored_pty.py pep8!skip # vendored code
+test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/my_module.py pylint:relative-beyond-top-level
+test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util2.py pylint:relative-beyond-top-level
+test/integration/targets/fork_safe_stdio/vendored_pty.py pep8!skip # vendored code
+test/integration/targets/gathering_facts/library/bogus_facts shebang
+test/integration/targets/gathering_facts/library/facts_one shebang
+test/integration/targets/gathering_facts/library/facts_two shebang
+test/integration/targets/incidental_win_reboot/templates/post_reboot.ps1 pslint!skip
+test/integration/targets/json_cleanup/library/bad_json shebang
+test/integration/targets/lookup_csvfile/files/crlf.csv line-endings
+test/integration/targets/lookup_ini/lookup-8859-15.ini no-smart-quotes
+test/integration/targets/module_precedence/lib_with_extension/a.ini shebang
+test/integration/targets/module_precedence/lib_with_extension/ping.ini shebang
+test/integration/targets/module_precedence/roles_with_extension/foo/library/a.ini shebang
+test/integration/targets/module_precedence/roles_with_extension/foo/library/ping.ini shebang
+test/integration/targets/module_utils/library/test.py future-import-boilerplate # allow testing of Python 2.x implicit relative imports
+test/integration/targets/module_utils/module_utils/bar0/foo.py pylint:disallowed-name
+test/integration/targets/module_utils/module_utils/foo.py pylint:disallowed-name
+test/integration/targets/module_utils/module_utils/sub/bar/bar.py pylint:disallowed-name
+test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:disallowed-name
+test/integration/targets/module_utils/module_utils/yak/zebra/foo.py pylint:disallowed-name
+test/integration/targets/old_style_modules_posix/library/helloworld.sh shebang
+test/integration/targets/template/files/encoding_1252_utf-8.expected no-smart-quotes
+test/integration/targets/template/files/encoding_1252_windows-1252.expected no-smart-quotes
+test/integration/targets/template/files/foo.dos.txt line-endings
+test/integration/targets/template/templates/encoding_1252.j2 no-smart-quotes
+test/integration/targets/unicode/unicode.yml no-smart-quotes
+test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip
+test/integration/targets/win_exec_wrapper/library/test_fail.ps1 pslint:PSCustomUseLiteralPath
+test/integration/targets/win_exec_wrapper/tasks/main.yml no-smart-quotes # We are explicitly testing smart quote support for env vars
+test/integration/targets/win_fetch/tasks/main.yml no-smart-quotes # We are explicitly testing smart quotes in the file name to fetch
+test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
+test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
+test/integration/targets/win_script/files/test_script.ps1 pslint:PSAvoidUsingWriteHost # Keep
+test/integration/targets/win_script/files/test_script_removes_file.ps1 pslint:PSCustomUseLiteralPath
+test/integration/targets/win_script/files/test_script_with_args.ps1 pslint:PSAvoidUsingWriteHost # Keep
+test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint:PSAvoidUsingWriteHost # Keep
+test/lib/ansible_test/_data/requirements/sanity.pslint.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
+test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
+test/lib/ansible_test/_util/target/setup/requirements.py replace-urlopen
+test/support/integration/plugins/modules/timezone.py pylint:disallowed-name
+test/support/integration/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
+test/support/integration/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
+test/support/integration/plugins/module_utils/compat/ipaddress.py no-unicode-literals
+test/support/integration/plugins/module_utils/network/common/utils.py future-import-boilerplate
+test/support/integration/plugins/module_utils/network/common/utils.py metaclass-boilerplate
+test/support/integration/plugins/module_utils/network/common/utils.py pylint:use-a-generator
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py pylint:used-before-assignment
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/network.py pylint:consider-using-dict-comprehension
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py no-unicode-literals
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py pep8:E203
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py pylint:unnecessary-comprehension
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py pylint:use-a-generator
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
+test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py pylint:arguments-renamed
+test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py pep8:E501
+test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py pylint:arguments-renamed
+test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pep8:E231
+test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pylint:disallowed-name
+test/support/windows-integration/plugins/action/win_copy.py pylint:used-before-assignment
+test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/module_utils/WebRequest.psm1 pslint!skip
+test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_uri.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/async_status.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/setup.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/slurp.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_acl.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_certificate_store.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_command.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_copy.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_file.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_get_url.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_lineinfile.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_regedit.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_shell.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_stat.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_tempfile.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_user_right.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_user.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_wait_for.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_whoami.ps1 pslint!skip
+test/units/executor/test_play_iterator.py pylint:disallowed-name
+test/units/modules/test_apt.py pylint:disallowed-name
+test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-no-version
+test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-version
+test/units/module_utils/basic/test_run_command.py pylint:disallowed-name
+test/units/module_utils/urls/fixtures/multipart.txt line-endings # Fixture for HTTP tests that use CRLF
+test/units/module_utils/urls/test_fetch_url.py replace-urlopen
+test/units/module_utils/urls/test_gzip.py replace-urlopen
+test/units/module_utils/urls/test_Request.py replace-urlopen
+test/units/parsing/vault/test_vault.py pylint:disallowed-name
+test/units/playbook/role/test_role.py pylint:disallowed-name
+test/units/plugins/test_plugins.py pylint:disallowed-name
+test/units/template/test_templar.py pylint:disallowed-name
+test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py pylint:relative-beyond-top-level
+test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py empty-init # testing that collections don't need inits
+test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py empty-init # testing that collections don't need inits
+test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py empty-init # testing that collections don't need inits
+test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/__init__.py empty-init # testing that collections don't need inits
+test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll/__init__.py empty-init # testing that collections don't need inits
+test/units/utils/collection_loader/test_collection_loader.py pylint:undefined-variable # magic runtime local var splatting