Diffstat (limited to 'test/sanity/code-smell')
-rw-r--r--  test/sanity/code-smell/ansible-requirements.json  7
-rw-r--r--  test/sanity/code-smell/ansible-requirements.py  29
-rw-r--r--  test/sanity/code-smell/ansible-test-future-boilerplate.json  10
-rw-r--r--  test/sanity/code-smell/ansible-test-future-boilerplate.py  59
-rw-r--r--  test/sanity/code-smell/configure-remoting-ps1.json  4
-rw-r--r--  test/sanity/code-smell/configure-remoting-ps1.py  52
-rw-r--r--  test/sanity/code-smell/deprecated-config.json  10
-rw-r--r--  test/sanity/code-smell/deprecated-config.py  103
-rw-r--r--  test/sanity/code-smell/deprecated-config.requirements.in  2
-rw-r--r--  test/sanity/code-smell/deprecated-config.requirements.txt  4
-rw-r--r--  test/sanity/code-smell/docs-build.json  5
-rw-r--r--  test/sanity/code-smell/docs-build.py  152
-rw-r--r--  test/sanity/code-smell/docs-build.requirements.in  9
-rw-r--r--  test/sanity/code-smell/docs-build.requirements.txt  50
-rw-r--r--  test/sanity/code-smell/no-unwanted-files.json  7
-rw-r--r--  test/sanity/code-smell/no-unwanted-files.py  49
-rw-r--r--  test/sanity/code-smell/obsolete-files.json  17
-rw-r--r--  test/sanity/code-smell/obsolete-files.py  17
-rw-r--r--  test/sanity/code-smell/package-data.json  5
-rw-r--r--  test/sanity/code-smell/package-data.py  405
-rw-r--r--  test/sanity/code-smell/package-data.requirements.in  7
-rw-r--r--  test/sanity/code-smell/package-data.requirements.txt  12
-rw-r--r--  test/sanity/code-smell/required-and-default-attributes.json  9
-rw-r--r--  test/sanity/code-smell/required-and-default-attributes.py  19
-rw-r--r--  test/sanity/code-smell/rstcheck.json  6
-rw-r--r--  test/sanity/code-smell/rstcheck.py  62
-rw-r--r--  test/sanity/code-smell/rstcheck.requirements.in  3
-rw-r--r--  test/sanity/code-smell/rstcheck.requirements.txt  25
-rw-r--r--  test/sanity/code-smell/skip.txt  2
-rw-r--r--  test/sanity/code-smell/test-constraints.json  11
-rw-r--r--  test/sanity/code-smell/test-constraints.py  126
-rw-r--r--  test/sanity/code-smell/update-bundled.json  8
-rw-r--r--  test/sanity/code-smell/update-bundled.py  178
-rw-r--r--  test/sanity/code-smell/update-bundled.requirements.in  1
-rw-r--r--  test/sanity/code-smell/update-bundled.requirements.txt  3
35 files changed, 1468 insertions, 0 deletions
diff --git a/test/sanity/code-smell/ansible-requirements.json b/test/sanity/code-smell/ansible-requirements.json
new file mode 100644
index 0000000..b4b7f2b
--- /dev/null
+++ b/test/sanity/code-smell/ansible-requirements.json
@@ -0,0 +1,7 @@
+{
+ "prefixes": [
+ "requirements.txt",
+ "test/lib/ansible_test/_data/requirements/ansible.txt"
+ ],
+ "output": "path-line-column-message"
+}
diff --git a/test/sanity/code-smell/ansible-requirements.py b/test/sanity/code-smell/ansible-requirements.py
new file mode 100644
index 0000000..4d1a652
--- /dev/null
+++ b/test/sanity/code-smell/ansible-requirements.py
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+import re
+import sys
+
+
+def read_file(path):
+ try:
+ with open(path, 'r') as f:
+ return f.read()
+ except Exception as ex: # pylint: disable=broad-except
+ print('%s:%d:%d: unable to read required file %s' % (path, 0, 0, re.sub(r'\s+', ' ', str(ex))))
+ return None
+
+
+def main():
+ ORIGINAL_FILE = 'requirements.txt'
+ VENDORED_COPY = 'test/lib/ansible_test/_data/requirements/ansible.txt'
+
+ original_requirements = read_file(ORIGINAL_FILE)
+ vendored_requirements = read_file(VENDORED_COPY)
+
+ if original_requirements is not None and vendored_requirements is not None:
+ if original_requirements != vendored_requirements:
+ print('%s:%d:%d: must be identical to %s' % (VENDORED_COPY, 0, 0, ORIGINAL_FILE))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/sanity/code-smell/ansible-test-future-boilerplate.json b/test/sanity/code-smell/ansible-test-future-boilerplate.json
new file mode 100644
index 0000000..ca4c067
--- /dev/null
+++ b/test/sanity/code-smell/ansible-test-future-boilerplate.json
@@ -0,0 +1,10 @@
+{
+ "extensions": [
+ ".py"
+ ],
+ "prefixes": [
+ "test/sanity/",
+ "test/lib/ansible_test/"
+ ],
+ "output": "path-message"
+}
diff --git a/test/sanity/code-smell/ansible-test-future-boilerplate.py b/test/sanity/code-smell/ansible-test-future-boilerplate.py
new file mode 100644
index 0000000..9a62225
--- /dev/null
+++ b/test/sanity/code-smell/ansible-test-future-boilerplate.py
@@ -0,0 +1,59 @@
+from __future__ import annotations
+
+import ast
+import sys
+
+
+def main():
+ # The following directories contain code which must work under Python 2.x.
+ py2_compat = (
+ 'test/lib/ansible_test/_util/target/',
+ )
+
+ for path in sys.argv[1:] or sys.stdin.read().splitlines():
+ if any(path.startswith(prefix) for prefix in py2_compat):
+ continue
+
+ with open(path, 'rb') as path_fd:
+ lines = path_fd.read().splitlines()
+
+ missing = True
+ if not lines:
+ # Files are allowed to be empty of everything including boilerplate
+ missing = False
+
+ invalid_future = []
+
+ for text in lines:
+ if text == b'from __future__ import annotations':
+ missing = False
+ break
+
+ if text.startswith(b'from __future__ ') or text == b'__metaclass__ = type':
+ invalid_future.append(text.decode())
+
+ if missing:
+ with open(path) as file:
+ contents = file.read()
+
+ # noinspection PyBroadException
+ try:
+ node = ast.parse(contents)
+
+ # files consisting of only assignments have no need for future import boilerplate
+ # the only exception would be division during assignment, but we'll overlook that for simplicity
+ # the most likely case is that of a documentation only python file
+ if all(isinstance(statement, ast.Assign) for statement in node.body):
+ missing = False
+ except Exception: # pylint: disable=broad-except
+ pass # the compile sanity test will report this error
+
+ if missing:
+ print('%s: missing: from __future__ import annotations' % path)
+
+ for text in invalid_future:
+ print('%s: invalid: %s' % (path, text))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/sanity/code-smell/configure-remoting-ps1.json b/test/sanity/code-smell/configure-remoting-ps1.json
new file mode 100644
index 0000000..593b765
--- /dev/null
+++ b/test/sanity/code-smell/configure-remoting-ps1.json
@@ -0,0 +1,4 @@
+{
+ "no_targets": true,
+ "output": "path-message"
+}
diff --git a/test/sanity/code-smell/configure-remoting-ps1.py b/test/sanity/code-smell/configure-remoting-ps1.py
new file mode 100644
index 0000000..fe67800
--- /dev/null
+++ b/test/sanity/code-smell/configure-remoting-ps1.py
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+import os
+
+
+def main():
+ # required by external automated processes and should not be moved, renamed or converted to a symbolic link
+ original = 'examples/scripts/ConfigureRemotingForAnsible.ps1'
+ # required to be packaged with ansible-test and must match the original file, but cannot be a symbolic link
+ # the packaged version is needed to run tests when ansible-test has been installed
+ # keeping the packaged version identical to the original makes sure tests cover both files
+ packaged = 'test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1'
+
+ copy_valid = False
+
+ if os.path.isfile(original) and os.path.isfile(packaged):
+ with open(original, 'rb') as original_file:
+ original_content = original_file.read()
+
+ with open(packaged, 'rb') as packaged_file:
+ packaged_content = packaged_file.read()
+
+ if original_content == packaged_content:
+ copy_valid = True
+
+ if not copy_valid:
+ print('%s: must be an exact copy of "%s"' % (packaged, original))
+
+ for path in [original, packaged]:
+ directory = path
+
+ while True:
+ directory = os.path.dirname(directory)
+
+ if not directory:
+ break
+
+ if not os.path.isdir(directory):
+ print('%s: must be a directory' % directory)
+
+ if os.path.islink(directory):
+ print('%s: cannot be a symbolic link' % directory)
+
+ if not os.path.isfile(path):
+ print('%s: must be a file' % path)
+
+ if os.path.islink(path):
+ print('%s: cannot be a symbolic link' % path)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/sanity/code-smell/deprecated-config.json b/test/sanity/code-smell/deprecated-config.json
new file mode 100644
index 0000000..4a88486
--- /dev/null
+++ b/test/sanity/code-smell/deprecated-config.json
@@ -0,0 +1,10 @@
+{
+ "all_targets": true,
+ "output": "path-message",
+ "extensions": [
+ ".py"
+ ],
+ "prefixes": [
+ "lib/ansible/"
+ ]
+}
diff --git a/test/sanity/code-smell/deprecated-config.py b/test/sanity/code-smell/deprecated-config.py
new file mode 100644
index 0000000..474628a
--- /dev/null
+++ b/test/sanity/code-smell/deprecated-config.py
@@ -0,0 +1,103 @@
+# -*- coding: utf-8 -*-
+# (c) 2018, Matt Martz <matt@sivel.net>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import annotations
+
+import mmap
+import os
+import re
+import sys
+
+from ansible.module_utils.compat.version import StrictVersion
+
+import yaml
+
+import ansible.config
+
+from ansible.plugins.loader import fragment_loader
+from ansible.release import __version__ as ansible_version
+from ansible.utils.plugin_docs import get_docstring
+
+DOC_RE = re.compile(b'^DOCUMENTATION', flags=re.M)
+ANSIBLE_MAJOR = StrictVersion('.'.join(ansible_version.split('.')[:2]))
+
+
+def find_deprecations(obj, path=None):
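+ # Recursively walk nested dicts/lists and yield (path, version) for each 'deprecated' entry
+ # whose 'version'/'removed_in' is at or below the current ansible-core major version.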
+ if not isinstance(obj, (list, dict)):
+ return
+
+ try:
+ items = obj.items()
+ except AttributeError:
+ items = enumerate(obj)
+
+ for key, value in items:
+ if path is None:
+ this_path = []
+ else:
+ this_path = path[:]
+
+ this_path.append(key)
+
+ if key != 'deprecated':
+ for result in find_deprecations(value, path=this_path):
+ yield result
+ else:
+ try:
+ version = value['version']
+ this_path.append('version')
+ except KeyError:
+ version = value['removed_in']
+ this_path.append('removed_in')
+ if StrictVersion(version) <= ANSIBLE_MAJOR:
+ yield (this_path, version)
+
+
+def main():
+ plugins = []
+ for path in sys.argv[1:] or sys.stdin.read().splitlines():
+ with open(path, 'rb') as f:
+ try:
+ mm_file = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
+ except ValueError:
+ continue
+ if DOC_RE.search(mm_file):
+ plugins.append(path)
+ mm_file.close()
+
+ for plugin in plugins:
+ data = {}
+ data['doc'], data['examples'], data['return'], data['metadata'] = get_docstring(plugin, fragment_loader)
+ for result in find_deprecations(data['doc']):
+ print(
+ '%s: %s is scheduled for removal in %s' % (plugin, '.'.join(str(i) for i in result[0][:-2]), result[1])
+ )
+
+ base = os.path.join(os.path.dirname(ansible.config.__file__), 'base.yml')
+ root_path = os.path.dirname(os.path.dirname(os.path.dirname(ansible.__file__)))
+ relative_base = os.path.relpath(base, root_path)
+
+ with open(base) as f:
+ data = yaml.safe_load(f)
+
+ for result in find_deprecations(data):
+ print('%s: %s is scheduled for removal in %s' % (relative_base, '.'.join(str(i) for i in result[0][:-2]), result[1]))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/sanity/code-smell/deprecated-config.requirements.in b/test/sanity/code-smell/deprecated-config.requirements.in
new file mode 100644
index 0000000..859c4ee
--- /dev/null
+++ b/test/sanity/code-smell/deprecated-config.requirements.in
@@ -0,0 +1,2 @@
+jinja2 # ansible-core requirement
+pyyaml
diff --git a/test/sanity/code-smell/deprecated-config.requirements.txt b/test/sanity/code-smell/deprecated-config.requirements.txt
new file mode 100644
index 0000000..6ab26e3
--- /dev/null
+++ b/test/sanity/code-smell/deprecated-config.requirements.txt
@@ -0,0 +1,4 @@
+# edit "deprecated-config.requirements.in" and generate with: hacking/update-sanity-requirements.py --test deprecated-config
+Jinja2==3.1.2
+MarkupSafe==2.1.1
+PyYAML==6.0
diff --git a/test/sanity/code-smell/docs-build.json b/test/sanity/code-smell/docs-build.json
new file mode 100644
index 0000000..a43fa92
--- /dev/null
+++ b/test/sanity/code-smell/docs-build.json
@@ -0,0 +1,5 @@
+{
+ "disabled": true,
+ "no_targets": true,
+ "output": "path-line-column-message"
+}
diff --git a/test/sanity/code-smell/docs-build.py b/test/sanity/code-smell/docs-build.py
new file mode 100644
index 0000000..aaa6937
--- /dev/null
+++ b/test/sanity/code-smell/docs-build.py
@@ -0,0 +1,152 @@
+from __future__ import annotations
+
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+def main():
+ base_dir = os.getcwd() + os.path.sep
+ docs_dir = os.path.abspath('docs/docsite')
+
+ # TODO: Remove this temporary hack to constrain 'cryptography' when we have
+ # a better story for dealing with it.
+ tmpfd, tmp = tempfile.mkstemp()
+ requirements_txt = os.path.join(base_dir, 'requirements.txt')
+ shutil.copy2(requirements_txt, tmp)
+ lines = []
+ with open(requirements_txt, 'r') as f:
+ for line in f.readlines():
+ if line.strip() == 'cryptography':
+ line = 'cryptography < 3.4\n'
+ lines.append(line)
+
+ with open(requirements_txt, 'w') as f:
+ f.writelines(lines)
+
+ try:
+ cmd = ['make', 'core_singlehtmldocs']
+ sphinx = subprocess.run(cmd, stdin=subprocess.DEVNULL, capture_output=True, cwd=docs_dir, check=False, text=True)
+ finally:
+ shutil.move(tmp, requirements_txt)
+
+ stdout = sphinx.stdout
+ stderr = sphinx.stderr
+
+ if sphinx.returncode != 0:
+ sys.stderr.write("Command '%s' failed with status code: %d\n" % (' '.join(cmd), sphinx.returncode))
+
+ if stdout.strip():
+ stdout = simplify_stdout(stdout)
+
+ sys.stderr.write("--> Standard Output\n")
+ sys.stderr.write("%s\n" % stdout.strip())
+
+ if stderr.strip():
+ sys.stderr.write("--> Standard Error\n")
+ sys.stderr.write("%s\n" % stderr.strip())
+
+ sys.exit(1)
+
+ with open('docs/docsite/rst_warnings', 'r') as warnings_fd:
+ output = warnings_fd.read().strip()
+ lines = output.splitlines()
+
+ known_warnings = {
+ 'block-quote-missing-blank-line': r'^Block quote ends without a blank line; unexpected unindent.$',
+ 'literal-block-lex-error': r'^Could not lex literal_block as "[^"]*". Highlighting skipped.$',
+ 'duplicate-label': r'^duplicate label ',
+ 'undefined-label': r'undefined label: ',
+ 'unknown-document': r'unknown document: ',
+ 'toc-tree-missing-document': r'toctree contains reference to nonexisting document ',
+ 'reference-target-not-found': r'[^ ]* reference target not found: ',
+ 'not-in-toc-tree': r"document isn't included in any toctree$",
+ 'unexpected-indentation': r'^Unexpected indentation.$',
+ 'definition-list-missing-blank-line': r'^Definition list ends without a blank line; unexpected unindent.$',
+ 'explicit-markup-missing-blank-line': r'Explicit markup ends without a blank line; unexpected unindent.$',
+ 'toc-tree-glob-pattern-no-match': r"^toctree glob pattern '[^']*' didn't match any documents$",
+ 'unknown-interpreted-text-role': '^Unknown interpreted text role "[^"]*".$',
+ }
+
+ for line in lines:
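+ # Sphinx warning lines look like "<path>:<line>:<column>: WARNING: <message>"; the line and column parts are optional.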
+ match = re.search('^(?P<path>[^:]+):((?P<line>[0-9]+):)?((?P<column>[0-9]+):)? (?P<level>WARNING|ERROR): (?P<message>.*)$', line)
+
+ if not match:
+ path = 'docs/docsite/rst/index.rst'
+ lineno = 0
+ column = 0
+ code = 'unknown'
+ message = line
+
+ # surface unknown lines while filtering out known lines to avoid excessive output
+ print('%s:%d:%d: %s: %s' % (path, lineno, column, code, message))
+ continue
+
+ path = match.group('path')
+ lineno = int(match.group('line') or 0)
+ column = int(match.group('column') or 0)
+ level = match.group('level').lower()
+ message = match.group('message')
+
+ path = os.path.abspath(path)
+
+ if path.startswith(base_dir):
+ path = path[len(base_dir):]
+
+ if path.startswith('rst/'):
+ path = 'docs/docsite/' + path # fix up paths reported relative to `docs/docsite/`
+
+ if level == 'warning':
+ code = 'warning'
+
+ for label, pattern in known_warnings.items():
+ if re.search(pattern, message):
+ code = label
+ break
+ else:
+ code = 'error'
+
+ print('%s:%d:%d: %s: %s' % (path, lineno, column, code, message))
+
+
+def simplify_stdout(value):
+ """Simplify output by omitting earlier 'rendering: ...' messages."""
+ lines = value.strip().splitlines()
+
+ rendering = []
+ keep = []
+
+ def truncate_rendering():
+ """Keep last rendering line (if any) with a message about omitted lines as needed."""
+ if not rendering:
+ return
+
+ notice = rendering[-1]
+
+ if len(rendering) > 1:
+ notice += ' (%d previous rendering line(s) omitted)' % (len(rendering) - 1)
+
+ keep.append(notice)
+ # Could change to rendering.clear() if we do not support python2
+ rendering[:] = []
+
+ for line in lines:
+ if line.startswith('rendering: '):
+ rendering.append(line)
+ continue
+
+ truncate_rendering()
+ keep.append(line)
+
+ truncate_rendering()
+
+ result = '\n'.join(keep)
+
+ return result
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/sanity/code-smell/docs-build.requirements.in b/test/sanity/code-smell/docs-build.requirements.in
new file mode 100644
index 0000000..02c3bfc
--- /dev/null
+++ b/test/sanity/code-smell/docs-build.requirements.in
@@ -0,0 +1,9 @@
+jinja2
+pyyaml
+resolvelib < 0.9.0
+sphinx == 4.2.0
+sphinx-notfound-page
+sphinx-ansible-theme
+straight.plugin
+rstcheck < 4 # match version used in other sanity tests
+antsibull-docs == 1.7.0 # currently approved version
diff --git a/test/sanity/code-smell/docs-build.requirements.txt b/test/sanity/code-smell/docs-build.requirements.txt
new file mode 100644
index 0000000..7e30a73
--- /dev/null
+++ b/test/sanity/code-smell/docs-build.requirements.txt
@@ -0,0 +1,50 @@
+# edit "docs-build.requirements.in" and generate with: hacking/update-sanity-requirements.py --test docs-build
+aiofiles==22.1.0
+aiohttp==3.8.3
+aiosignal==1.2.0
+alabaster==0.7.12
+ansible-pygments==0.1.1
+antsibull-core==1.2.0
+antsibull-docs==1.7.0
+async-timeout==4.0.2
+asyncio-pool==0.6.0
+attrs==22.1.0
+Babel==2.10.3
+certifi==2022.9.14
+charset-normalizer==2.1.1
+docutils==0.17.1
+frozenlist==1.3.1
+idna==3.4
+imagesize==1.4.1
+Jinja2==3.1.2
+MarkupSafe==2.1.1
+multidict==6.0.2
+packaging==21.3
+perky==0.5.5
+pydantic==1.10.2
+Pygments==2.13.0
+pyparsing==3.0.9
+pytz==2022.2.1
+PyYAML==6.0
+requests==2.28.1
+resolvelib==0.8.1
+rstcheck==3.5.0
+semantic-version==2.10.0
+sh==1.14.3
+six==1.16.0
+snowballstemmer==2.2.0
+Sphinx==4.2.0
+sphinx-ansible-theme==0.9.1
+sphinx-notfound-page==0.8.3
+sphinx-rtd-theme==1.0.0
+sphinxcontrib-applehelp==1.0.2
+sphinxcontrib-devhelp==1.0.2
+sphinxcontrib-htmlhelp==2.0.0
+sphinxcontrib-jsmath==1.0.1
+sphinxcontrib-qthelp==1.0.3
+sphinxcontrib-serializinghtml==1.1.5
+straight.plugin==1.5.0
+Twiggy==0.5.1
+typing_extensions==4.3.0
+urllib3==1.26.12
+yarl==1.8.1
diff --git a/test/sanity/code-smell/no-unwanted-files.json b/test/sanity/code-smell/no-unwanted-files.json
new file mode 100644
index 0000000..7a89ebb
--- /dev/null
+++ b/test/sanity/code-smell/no-unwanted-files.json
@@ -0,0 +1,7 @@
+{
+ "include_symlinks": true,
+ "prefixes": [
+ "lib/"
+ ],
+ "output": "path-message"
+}
diff --git a/test/sanity/code-smell/no-unwanted-files.py b/test/sanity/code-smell/no-unwanted-files.py
new file mode 100644
index 0000000..7e13f53
--- /dev/null
+++ b/test/sanity/code-smell/no-unwanted-files.py
@@ -0,0 +1,49 @@
+"""Prevent unwanted files from being added to the source tree."""
+from __future__ import annotations
+
+import os
+import sys
+
+
+def main():
+ """Main entry point."""
+ paths = sys.argv[1:] or sys.stdin.read().splitlines()
+
+ allowed_extensions = (
+ '.cs',
+ '.ps1',
+ '.psm1',
+ '.py',
+ )
+
+ skip_paths = set([
+ 'lib/ansible/config/ansible_builtin_runtime.yml', # not included in the sanity ignore file since it won't exist until after migration
+ ])
+
+ skip_directories = (
+ 'lib/ansible/galaxy/data/',
+ )
+
+ allow_yaml = ('lib/ansible/plugins/test', 'lib/ansible/plugins/filter')
+
+ for path in paths:
+ if path in skip_paths:
+ continue
+
+ if any(path.startswith(skip_directory) for skip_directory in skip_directories):
+ continue
+
+ if path.startswith('lib/') and not path.startswith('lib/ansible/'):
+ print('%s: all "lib" content must reside in the "lib/ansible" directory' % path)
+ continue
+
+ ext = os.path.splitext(path)[1]
+ if ext in ('.yml', ) and any(path.startswith(yaml_directory) for yaml_directory in allow_yaml):
+ continue
+
+ if ext not in allowed_extensions:
+ print('%s: extension must be one of: %s' % (path, ', '.join(allowed_extensions)))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/sanity/code-smell/obsolete-files.json b/test/sanity/code-smell/obsolete-files.json
new file mode 100644
index 0000000..02d3920
--- /dev/null
+++ b/test/sanity/code-smell/obsolete-files.json
@@ -0,0 +1,17 @@
+{
+ "include_symlinks": true,
+ "prefixes": [
+ "test/runner/",
+ "test/sanity/ansible-doc/",
+ "test/sanity/compile/",
+ "test/sanity/import/",
+ "test/sanity/pep8/",
+ "test/sanity/pslint/",
+ "test/sanity/pylint/",
+ "test/sanity/rstcheck/",
+ "test/sanity/shellcheck/",
+ "test/sanity/validate-modules/",
+ "test/sanity/yamllint/"
+ ],
+ "output": "path-message"
+}
diff --git a/test/sanity/code-smell/obsolete-files.py b/test/sanity/code-smell/obsolete-files.py
new file mode 100644
index 0000000..3c1a4a4
--- /dev/null
+++ b/test/sanity/code-smell/obsolete-files.py
@@ -0,0 +1,17 @@
+"""Prevent files from being added to directories that are now obsolete."""
+from __future__ import annotations
+
+import os
+import sys
+
+
+def main():
+ """Main entry point."""
+ paths = sys.argv[1:] or sys.stdin.read().splitlines()
+
+ for path in paths:
+ print('%s: directory "%s/" is obsolete and should not contain any files' % (path, os.path.dirname(path)))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/sanity/code-smell/package-data.json b/test/sanity/code-smell/package-data.json
new file mode 100644
index 0000000..0aa70a3
--- /dev/null
+++ b/test/sanity/code-smell/package-data.json
@@ -0,0 +1,5 @@
+{
+ "disabled": true,
+ "all_targets": true,
+ "output": "path-message"
+}
diff --git a/test/sanity/code-smell/package-data.py b/test/sanity/code-smell/package-data.py
new file mode 100644
index 0000000..0c6e761
--- /dev/null
+++ b/test/sanity/code-smell/package-data.py
@@ -0,0 +1,405 @@
+from __future__ import annotations
+
+import contextlib
+import fnmatch
+import glob
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+
+
+def assemble_files_to_ship(complete_file_list):
+ """
+ This looks for all files which should be shipped in the sdist
+ """
+ # All files which are in the repository except these:
+ ignore_patterns = (
+ # Developer-only tools
+ '.azure-pipelines/*',
+ '.github/*',
+ '.github/*/*',
+ 'changelogs/fragments/*',
+ 'hacking/backport/*',
+ 'hacking/azp/*',
+ 'hacking/tests/*',
+ 'hacking/ticket_stubs/*',
+ 'test/sanity/code-smell/botmeta.*',
+ 'test/sanity/code-smell/release-names.*',
+ 'test/utils/*',
+ 'test/utils/*/*',
+ 'test/utils/*/*/*',
+ 'test/results/.tmp/*',
+ 'test/results/.tmp/*/*',
+ 'test/results/.tmp/*/*/*',
+ 'test/results/.tmp/*/*/*/*',
+ 'test/results/.tmp/*/*/*/*/*',
+ '.git*',
+ )
+ ignore_files = frozenset((
+ # Developer-only tools
+ 'changelogs/config.yaml',
+ 'hacking/README.md',
+ 'hacking/ansible-profile',
+ 'hacking/cgroup_perf_recap_graph.py',
+ 'hacking/create_deprecated_issues.py',
+ 'hacking/deprecated_issue_template.md',
+ 'hacking/create_deprecation_bug_reports.py',
+ 'hacking/fix_test_syntax.py',
+ 'hacking/get_library.py',
+ 'hacking/metadata-tool.py',
+ 'hacking/report.py',
+ 'hacking/return_skeleton_generator.py',
+ 'hacking/test-module',
+ 'test/support/README.md',
+ 'test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py',
+ 'test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py',
+ '.cherry_picker.toml',
+ '.mailmap',
+ # Generated as part of a build step
+ 'docs/docsite/rst/conf.py',
+ 'docs/docsite/rst/index.rst',
+ 'docs/docsite/rst/dev_guide/index.rst',
+ # Possibly should be included
+ 'examples/scripts/uptime.py',
+ 'examples/scripts/my_test.py',
+ 'examples/scripts/my_test_info.py',
+ 'examples/scripts/my_test_facts.py',
+ 'examples/DOCUMENTATION.yml',
+ 'examples/play.yml',
+ 'examples/hosts.yaml',
+ 'examples/hosts.yml',
+ 'examples/inventory_script_schema.json',
+ 'examples/plugin_filters.yml',
+ 'hacking/env-setup',
+ 'hacking/env-setup.fish',
+ 'MANIFEST',
+ 'setup.cfg',
+ # docs for test files not included in sdist
+ 'docs/docsite/rst/dev_guide/testing/sanity/bin-symlinks.rst',
+ 'docs/docsite/rst/dev_guide/testing/sanity/botmeta.rst',
+ 'docs/docsite/rst/dev_guide/testing/sanity/integration-aliases.rst',
+ 'docs/docsite/rst/dev_guide/testing/sanity/release-names.rst',
+ ))
+
+ # These files are generated and then intentionally added to the sdist
+
+ # Manpages
+ ignore_script = ('ansible-connection', 'ansible-test')
+ manpages = ['docs/man/man1/ansible.1']
+ for dirname, dummy, files in os.walk('bin'):
+ for filename in files:
+ if filename in ignore_script:
+ continue
+ manpages.append('docs/man/man1/%s.1' % filename)
+
+ # Misc
+ misc_generated_files = [
+ 'PKG-INFO',
+ ]
+
+ shipped_files = manpages + misc_generated_files
+
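+ # Ship every repository file that is not explicitly ignored (the for/else appends only when no ignore pattern matched).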
+ for path in complete_file_list:
+ if path not in ignore_files:
+ for ignore in ignore_patterns:
+ if fnmatch.fnmatch(path, ignore):
+ break
+ else:
+ shipped_files.append(path)
+
+ return shipped_files
+
+
+def assemble_files_to_install(complete_file_list):
+ """
+ This looks for all of the files which should show up in an installation of ansible
+ """
+ ignore_patterns = (
+ # Tests excluded from sdist
+ 'test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py',
+ 'test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py',
+ )
+
+ pkg_data_files = []
+ for path in complete_file_list:
+
+ if path.startswith("lib/ansible"):
+ prefix = 'lib'
+ elif path.startswith("test/lib/ansible_test"):
+ prefix = 'test/lib'
+ else:
+ continue
+
+ for ignore in ignore_patterns:
+ if fnmatch.fnmatch(path, ignore):
+ break
+ else:
+ pkg_data_files.append(os.path.relpath(path, prefix))
+
+ return pkg_data_files
+
+
+@contextlib.contextmanager
+def clean_repository(file_list):
+ """Copy the repository to clean it of artifacts"""
+ # Create a tempdir that will be the clean repo
+ with tempfile.TemporaryDirectory() as repo_root:
+ directories = set((repo_root + os.path.sep,))
+
+ for filename in file_list:
+ # Determine if we need to create the directory
+ directory = os.path.dirname(filename)
+ dest_dir = os.path.join(repo_root, directory)
+ if dest_dir not in directories:
+ os.makedirs(dest_dir)
+
+ # Keep track of all the directories that now exist
+ path_components = directory.split(os.path.sep)
+ path = repo_root
+ for component in path_components:
+ path = os.path.join(path, component)
+ if path not in directories:
+ directories.add(path)
+
+ # Copy the file
+ shutil.copy2(filename, dest_dir, follow_symlinks=False)
+
+ yield repo_root
+
+
+def create_sdist(tmp_dir):
+ """Create an sdist in the repository"""
+ create = subprocess.run(
+ ['make', 'snapshot', 'SDIST_DIR=%s' % tmp_dir],
+ stdin=subprocess.DEVNULL,
+ capture_output=True,
+ text=True,
+ check=False,
+ )
+
+ stderr = create.stderr
+
+ if create.returncode != 0:
+ raise Exception('make snapshot failed:\n%s' % stderr)
+
+ # Determine path to sdist
+ tmp_dir_files = os.listdir(tmp_dir)
+
+ if not tmp_dir_files:
+ raise Exception('sdist was not created in the temp dir')
+ elif len(tmp_dir_files) > 1:
+ raise Exception('Unexpected extra files in the temp dir')
+
+ return os.path.join(tmp_dir, tmp_dir_files[0])
+
+
+def extract_sdist(sdist_path, tmp_dir):
+ """Untar the sdist"""
+ # Untar the sdist from the tmp_dir
+ with tarfile.open(os.path.join(tmp_dir, sdist_path), 'r|*') as sdist:
+ sdist.extractall(path=tmp_dir)
+
+ # Determine the sdist directory name
+ sdist_filename = os.path.basename(sdist_path)
+ tmp_dir_files = os.listdir(tmp_dir)
+ try:
+ tmp_dir_files.remove(sdist_filename)
+ except ValueError:
+ # Unexpected: the original sdist could not be found in the temp dir
+ raise
+
+ if len(tmp_dir_files) > 1:
+ raise Exception('Unexpected extra files in the temp dir')
+ elif len(tmp_dir_files) < 1:
+ raise Exception('sdist extraction did not occur in the temp dir')
+
+ return os.path.join(tmp_dir, tmp_dir_files[0])
+
+
+def install_sdist(tmp_dir, sdist_dir):
+ """Install the extracted sdist into the temporary directory"""
+ install = subprocess.run(
+ ['python', 'setup.py', 'install', '--root=%s' % tmp_dir],
+ stdin=subprocess.DEVNULL,
+ capture_output=True,
+ text=True,
+ cwd=os.path.join(tmp_dir, sdist_dir),
+ check=False,
+ )
+
+ stdout, stderr = install.stdout, install.stderr
+
+ if install.returncode != 0:
+ raise Exception('sdist install failed:\n%s' % stderr)
+
+ # Determine the prefix for the installed files
+ match = re.search('^copying .* -> (%s/.*?/(?:site|dist)-packages)/ansible$' %
+ tmp_dir, stdout, flags=re.M)
+
+ return match.group(1)
+
+
+def check_sdist_contains_expected(sdist_dir, to_ship_files):
+ """Check that the files we expect to ship are present in the sdist"""
+ results = []
+ for filename in to_ship_files:
+ path = os.path.join(sdist_dir, filename)
+ if not os.path.exists(path):
+ results.append('%s: File was not added to sdist' % filename)
+
+ # Also changelog
+ changelog_files = glob.glob(os.path.join(sdist_dir, 'changelogs/CHANGELOG-v2.[0-9]*.rst'))
+ if not changelog_files:
+ results.append('changelogs/CHANGELOG-v2.*.rst: Changelog file was not added to the sdist')
+ elif len(changelog_files) > 1:
+ results.append('changelogs/CHANGELOG-v2.*.rst: Too many changelog files: %s'
+ % changelog_files)
+
+ return results
+
+
+def check_sdist_files_are_wanted(sdist_dir, to_ship_files):
+ """Check that all files in the sdist are desired"""
+ results = []
+ for dirname, dummy, files in os.walk(sdist_dir):
+ dirname = os.path.relpath(dirname, start=sdist_dir)
+ if dirname == '.':
+ dirname = ''
+
+ for filename in files:
+ if filename == 'setup.cfg':
+ continue
+
+ path = os.path.join(dirname, filename)
+ if path not in to_ship_files:
+
+ if fnmatch.fnmatch(path, 'changelogs/CHANGELOG-v2.[0-9]*.rst'):
+ # changelog files are expected
+ continue
+
+ if fnmatch.fnmatch(path, 'lib/ansible_core.egg-info/*'):
+ continue
+
+ # FIXME: ansible-test doesn't pass the paths of symlinks to us so we aren't
+ # checking those
+ if not os.path.islink(os.path.join(sdist_dir, path)):
+ results.append('%s: File in sdist was not in the repository' % path)
+
+ return results
+
+
+def check_installed_contains_expected(install_dir, to_install_files):
+ """Check that all the files we expect to be installed are"""
+ results = []
+ for filename in to_install_files:
+ path = os.path.join(install_dir, filename)
+ if not os.path.exists(path):
+ results.append('%s: File not installed' % os.path.join('lib', filename))
+
+ return results
+
+
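+# Metadata files generated inside the egg-info directory by "setup.py install"; these are expected
+# in the install tree even though they are not part of the repository file list.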
+EGG_RE = re.compile('ansible[^/]+\\.egg-info/(PKG-INFO|SOURCES.txt|'
+ 'dependency_links.txt|not-zip-safe|requires.txt|top_level.txt|entry_points.txt)$')
+
+
+def check_installed_files_are_wanted(install_dir, to_install_files):
+ """Check that all installed files were desired"""
+ results = []
+
+ for dirname, dummy, files in os.walk(install_dir):
+ dirname = os.path.relpath(dirname, start=install_dir)
+ if dirname == '.':
+ dirname = ''
+
+ for filename in files:
+ # If this is a byte code cache, look for the python file's name
+ directory = dirname
+ if filename.endswith('.pyc') or filename.endswith('.pyo'):
+ # Remove the trailing "o" or c"
+ filename = filename[:-1]
+
+ if directory.endswith('%s__pycache__' % os.path.sep):
+ # Python3 byte code cache, look for the basename of
+ # __pycache__/__init__.cpython-36.py
+ segments = filename.rsplit('.', 2)
+ if len(segments) >= 3:
+ filename = '.'.join((segments[0], segments[2]))
+ directory = os.path.dirname(directory)
+
+ path = os.path.join(directory, filename)
+
+ # Test that the file was listed for installation
+ if path not in to_install_files:
+ # FIXME: ansible-test doesn't pass the paths of symlinks to us so we
+ # aren't checking those
+ if not os.path.islink(os.path.join(install_dir, path)):
+ if not EGG_RE.match(path):
+ results.append('%s: File was installed but was not supposed to be' % path)
+
+ return results
+
+
+def _find_symlinks():
+ symlink_list = []
+ for dirname, directories, filenames in os.walk('.'):
+ for filename in filenames:
+ path = os.path.join(dirname, filename)
+ # Strip off "./" from the front
+ path = path[2:]
+ if os.path.islink(path):
+ symlink_list.append(path)
+
+ return symlink_list
+
+
+def main():
+ """All of the files in the repository"""
+ complete_file_list = []
+ for path in sys.argv[1:] or sys.stdin.read().splitlines():
+ complete_file_list.append(path)
+
+ # ansible-test isn't currently passing symlinks to us so construct those ourselves for now
+ for filename in _find_symlinks():
+ if filename not in complete_file_list:
+ # For some reason ansible-test is passing us lib/ansible/module_utils/ansible_release.py
+ # which is a symlink even though it doesn't pass any others
+ complete_file_list.append(filename)
+
+ # We may run this after docs sanity tests so get a clean repository to run in
+ with clean_repository(complete_file_list) as clean_repo_dir:
+ os.chdir(clean_repo_dir)
+
+ to_ship_files = assemble_files_to_ship(complete_file_list)
+ to_install_files = assemble_files_to_install(complete_file_list)
+
+ results = []
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ sdist_path = create_sdist(tmp_dir)
+ sdist_dir = extract_sdist(sdist_path, tmp_dir)
+
+ # Check that the files that are supposed to be in the sdist are there
+ results.extend(check_sdist_contains_expected(sdist_dir, to_ship_files))
+
+ # Check that the files that are in the sdist are in the repository
+ results.extend(check_sdist_files_are_wanted(sdist_dir, to_ship_files))
+
+ # install the sdist
+ install_dir = install_sdist(tmp_dir, sdist_dir)
+
+ # Check that the files that are supposed to be installed are there
+ results.extend(check_installed_contains_expected(install_dir, to_install_files))
+
+ # Check that the files that are installed are supposed to be installed
+ results.extend(check_installed_files_are_wanted(install_dir, to_install_files))
+
+ for message in results:
+ print(message)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/sanity/code-smell/package-data.requirements.in b/test/sanity/code-smell/package-data.requirements.in
new file mode 100644
index 0000000..6b58f75
--- /dev/null
+++ b/test/sanity/code-smell/package-data.requirements.in
@@ -0,0 +1,7 @@
+docutils < 0.18 # match version required by sphinx in the docs-build sanity test
+jinja2
+pyyaml # ansible-core requirement
+resolvelib < 0.9.0
+rstcheck < 4 # match version used in other sanity tests
+straight.plugin
+antsibull-changelog
diff --git a/test/sanity/code-smell/package-data.requirements.txt b/test/sanity/code-smell/package-data.requirements.txt
new file mode 100644
index 0000000..94ad68f
--- /dev/null
+++ b/test/sanity/code-smell/package-data.requirements.txt
@@ -0,0 +1,12 @@
+# edit "package-data.requirements.in" and generate with: hacking/update-sanity-requirements.py --test package-data
+antsibull-changelog==0.16.0
+docutils==0.17.1
+Jinja2==3.1.2
+MarkupSafe==2.1.1
+packaging==21.3
+pyparsing==3.0.9
+PyYAML==6.0
+resolvelib==0.8.1
+rstcheck==3.5.0
+semantic-version==2.10.0
+straight.plugin==1.5.0
diff --git a/test/sanity/code-smell/required-and-default-attributes.json b/test/sanity/code-smell/required-and-default-attributes.json
new file mode 100644
index 0000000..dd9ac7b
--- /dev/null
+++ b/test/sanity/code-smell/required-and-default-attributes.json
@@ -0,0 +1,9 @@
+{
+ "prefixes": [
+ "lib/ansible/"
+ ],
+ "extensions": [
+ ".py"
+ ],
+ "output": "path-line-column-message"
+}
diff --git a/test/sanity/code-smell/required-and-default-attributes.py b/test/sanity/code-smell/required-and-default-attributes.py
new file mode 100644
index 0000000..900829d
--- /dev/null
+++ b/test/sanity/code-smell/required-and-default-attributes.py
@@ -0,0 +1,19 @@
+from __future__ import annotations
+
+import re
+import sys
+
+
+def main():
+ for path in sys.argv[1:] or sys.stdin.read().splitlines():
+ with open(path, 'r') as path_fd:
+ for line, text in enumerate(path_fd.readlines()):
+ match = re.search(r'(FieldAttribute.*(default|required).*(default|required))', text)
+
+ if match:
+ print('%s:%d:%d: use only one of `default` or `required` with `FieldAttribute`' % (
+ path, line + 1, match.start(1) + 1))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/sanity/code-smell/rstcheck.json b/test/sanity/code-smell/rstcheck.json
new file mode 100644
index 0000000..870c19f
--- /dev/null
+++ b/test/sanity/code-smell/rstcheck.json
@@ -0,0 +1,6 @@
+{
+ "output": "path-line-column-message",
+ "extensions": [
+ ".rst"
+ ]
+}
diff --git a/test/sanity/code-smell/rstcheck.py b/test/sanity/code-smell/rstcheck.py
new file mode 100644
index 0000000..99917ca
--- /dev/null
+++ b/test/sanity/code-smell/rstcheck.py
@@ -0,0 +1,62 @@
+"""Sanity test using rstcheck and sphinx."""
+from __future__ import annotations
+
+import re
+import subprocess
+import sys
+
+
+def main():
+ paths = sys.argv[1:] or sys.stdin.read().splitlines()
+
+ encoding = 'utf-8'
+
+ ignore_substitutions = (
+ 'br',
+ )
+
+ cmd = [
+ sys.executable,
+ '-m', 'rstcheck',
+ '--report', 'warning',
+ '--ignore-substitutions', ','.join(ignore_substitutions),
+ ] + paths
+
+ process = subprocess.run(cmd,
+ stdin=subprocess.DEVNULL,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ check=False,
+ )
+
+ if process.stdout:
+ raise Exception(process.stdout)
+
+ pattern = re.compile(r'^(?P<path>[^:]*):(?P<line>[0-9]+): \((?P<level>INFO|WARNING|ERROR|SEVERE)/[0-4]\) (?P<message>.*)$')
+
+ results = parse_to_list_of_dict(pattern, process.stderr.decode(encoding))
+
+ for result in results:
+ print('%s:%s:%s: %s' % (result['path'], result['line'], 0, result['message']))
+
+
+def parse_to_list_of_dict(pattern, value):
+ matched = []
+ unmatched = []
+
+ for line in value.splitlines():
+ match = re.search(pattern, line)
+
+ if match:
+ matched.append(match.groupdict())
+ else:
+ unmatched.append(line)
+
+ if unmatched:
+ raise Exception('Pattern "%s" did not match values:\n%s' % (pattern, '\n'.join(unmatched)))
+
+ return matched
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/sanity/code-smell/rstcheck.requirements.in b/test/sanity/code-smell/rstcheck.requirements.in
new file mode 100644
index 0000000..5b93841
--- /dev/null
+++ b/test/sanity/code-smell/rstcheck.requirements.in
@@ -0,0 +1,3 @@
+sphinx == 4.2.0 # required for full rstcheck functionality, installed first to get the correct docutils version
+rstcheck < 4 # match version used in other sanity tests
+jinja2 # ansible-core requirement
diff --git a/test/sanity/code-smell/rstcheck.requirements.txt b/test/sanity/code-smell/rstcheck.requirements.txt
new file mode 100644
index 0000000..81d5c4f
--- /dev/null
+++ b/test/sanity/code-smell/rstcheck.requirements.txt
@@ -0,0 +1,25 @@
+# edit "rstcheck.requirements.in" and generate with: hacking/update-sanity-requirements.py --test rstcheck
+alabaster==0.7.12
+Babel==2.10.3
+certifi==2022.9.14
+charset-normalizer==2.1.1
+docutils==0.17.1
+idna==3.4
+imagesize==1.4.1
+Jinja2==3.1.2
+MarkupSafe==2.1.1
+packaging==21.3
+Pygments==2.13.0
+pyparsing==3.0.9
+pytz==2022.2.1
+requests==2.28.1
+rstcheck==3.5.0
+snowballstemmer==2.2.0
+Sphinx==4.2.0
+sphinxcontrib-applehelp==1.0.2
+sphinxcontrib-devhelp==1.0.2
+sphinxcontrib-htmlhelp==2.0.0
+sphinxcontrib-jsmath==1.0.1
+sphinxcontrib-qthelp==1.0.3
+sphinxcontrib-serializinghtml==1.1.5
+urllib3==1.26.12
diff --git a/test/sanity/code-smell/skip.txt b/test/sanity/code-smell/skip.txt
new file mode 100644
index 0000000..6fb327b
--- /dev/null
+++ b/test/sanity/code-smell/skip.txt
@@ -0,0 +1,2 @@
+deprecated-config.py # disabled by default, to be enabled by the release manager after branching
+update-bundled.py # disabled by default, to be enabled by the release manager after branching
diff --git a/test/sanity/code-smell/test-constraints.json b/test/sanity/code-smell/test-constraints.json
new file mode 100644
index 0000000..8f47beb
--- /dev/null
+++ b/test/sanity/code-smell/test-constraints.json
@@ -0,0 +1,11 @@
+{
+ "all_targets": true,
+ "prefixes": [
+ "test/lib/ansible_test/_data/requirements/",
+ "test/sanity/code-smell/"
+ ],
+ "extensions": [
+ ".txt"
+ ],
+ "output": "path-line-column-message"
+}
diff --git a/test/sanity/code-smell/test-constraints.py b/test/sanity/code-smell/test-constraints.py
new file mode 100644
index 0000000..df30fe1
--- /dev/null
+++ b/test/sanity/code-smell/test-constraints.py
@@ -0,0 +1,126 @@
+from __future__ import annotations
+
+import os
+import pathlib
+import re
+import sys
+
+
+def main():
+ constraints_path = 'test/lib/ansible_test/_data/requirements/constraints.txt'
+
+ requirements = {}
+
+ for path in sys.argv[1:] or sys.stdin.read().splitlines():
+ if path == 'test/lib/ansible_test/_data/requirements/ansible.txt':
+ # This file is an exact copy of the ansible requirements.txt and should not conflict with other constraints.
+ continue
+
+ with open(path, 'r') as path_fd:
+ requirements[path] = parse_requirements(path_fd.read().splitlines())
+
+ if path == 'test/lib/ansible_test/_data/requirements/ansible-test.txt':
+ # Special handling is required for ansible-test's requirements file.
+ check_ansible_test(path, requirements.pop(path))
+ continue
+
+ frozen_sanity = {}
+ non_sanity_requirements = set()
+
+ for path, requirements in requirements.items():
+ filename = os.path.basename(path)
+
+ is_sanity = filename.startswith('sanity.') or filename.endswith('.requirements.txt')
+ is_constraints = path == constraints_path
+
+ for lineno, line, requirement in requirements:
+ if not requirement:
+ print('%s:%d:%d: cannot parse requirement: %s' % (path, lineno, 1, line))
+ continue
+
+ name = requirement.group('name').lower()
+ raw_constraints = requirement.group('constraints')
+ constraints = raw_constraints.strip()
+ comment = requirement.group('comment')
+
+ is_pinned = re.search('^ *== *[0-9.]+(\\.post[0-9]+)?$', constraints)
+
+ if is_sanity:
+ sanity = frozen_sanity.setdefault(name, [])
+ sanity.append((path, lineno, line, requirement))
+ elif not is_constraints:
+ non_sanity_requirements.add(name)
+
+ if is_sanity:
+ if not is_pinned:
+ # sanity test requirements must be pinned
+ print('%s:%d:%d: sanity test requirement (%s%s) must be frozen (use `==`)' % (path, lineno, 1, name, raw_constraints))
+
+ continue
+
+ if constraints and not is_constraints:
+ allow_constraints = 'sanity_ok' in comment
+
+ if not allow_constraints:
+ # keeping constraints for tests other than sanity tests in one file helps avoid conflicts
+ print('%s:%d:%d: put the constraint (%s%s) in `%s`' % (path, lineno, 1, name, raw_constraints, constraints_path))
+
+ for name, requirements in frozen_sanity.items():
+ if len(set(req[3].group('constraints').strip() for req in requirements)) != 1:
+ for req in requirements:
+ print('%s:%d:%d: sanity constraint (%s) does not match others for package `%s`' % (
+ req[0], req[1], req[3].start('constraints') + 1, req[3].group('constraints'), name))
+
+
+def check_ansible_test(path: str, requirements: list[tuple[int, str, re.Match]]) -> None:
+ sys.path.insert(0, str(pathlib.Path(__file__).parent.parent.parent.joinpath('lib')))
+
+ from ansible_test._internal.python_requirements import VIRTUALENV_VERSION
+ from ansible_test._internal.coverage_util import COVERAGE_VERSIONS
+ from ansible_test._internal.util import version_to_str
+
+ expected_lines = set([
+ f"virtualenv == {VIRTUALENV_VERSION} ; python_version < '3'",
+ ] + [
+ f"coverage == {item.coverage_version} ; python_version >= '{version_to_str(item.min_python)}' and python_version <= '{version_to_str(item.max_python)}'"
+ for item in COVERAGE_VERSIONS
+ ])
+
+ for idx, requirement in enumerate(requirements):
+ lineno, line, match = requirement
+
+ if line in expected_lines:
+ expected_lines.remove(line)
+ continue
+
+ print('%s:%d:%d: unexpected line: %s' % (path, lineno, 1, line))
+
+ for expected_line in sorted(expected_lines):
+ print('%s:%d:%d: missing line: %s' % (path, requirements[-1][0] + 1, 1, expected_line))
+
+
+def parse_requirements(lines):
+ # see https://www.python.org/dev/peps/pep-0508/#names
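+ # capture groups: package name, optional extras, version constraints, environment markers and any trailing comment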
+ pattern = re.compile(r'^(?P<name>[A-Z0-9][A-Z0-9._-]*[A-Z0-9]|[A-Z0-9])(?P<extras> *\[[^]]*])?(?P<constraints>[^;#]*)(?P<markers>[^#]*)(?P<comment>.*)$',
+ re.IGNORECASE)
+
+ matches = [(lineno, line, pattern.search(line)) for lineno, line in enumerate(lines, start=1)]
+ requirements = []
+
+ for lineno, line, match in matches:
+ if not line.strip():
+ continue
+
+ if line.strip().startswith('#'):
+ continue
+
+ if line.startswith('git+https://'):
+ continue # hack to ignore git requirements
+
+ requirements.append((lineno, line, match))
+
+ return requirements
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/sanity/code-smell/update-bundled.json b/test/sanity/code-smell/update-bundled.json
new file mode 100644
index 0000000..379bf4d
--- /dev/null
+++ b/test/sanity/code-smell/update-bundled.json
@@ -0,0 +1,8 @@
+{
+ "all_targets": true,
+ "ignore_self": true,
+ "extensions": [
+ ".py"
+ ],
+ "output": "path-message"
+}
diff --git a/test/sanity/code-smell/update-bundled.py b/test/sanity/code-smell/update-bundled.py
new file mode 100644
index 0000000..4bad77a
--- /dev/null
+++ b/test/sanity/code-smell/update-bundled.py
@@ -0,0 +1,178 @@
+# -*- coding: utf-8 -*-
+# (c) 2018, Ansible Project
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+"""
+This test checks whether the libraries we're bundling are out of date and need to be synced with
+a newer upstream release.
+"""
+
+
+from __future__ import annotations
+
+import fnmatch
+import json
+import re
+import sys
+from ansible.module_utils.compat.version import LooseVersion
+
+import packaging.specifiers
+
+from ansible.module_utils.urls import open_url
+
+
+BUNDLED_RE = re.compile(b'\\b_BUNDLED_METADATA\\b')
+
+
+def get_bundled_libs(paths):
+ """
+ Return the set of known bundled libraries
+
+ :arg paths: The paths which the test has been instructed to check
+ :returns: The set of all files which we know to contain bundled libraries. If a bundled
+ library consists of multiple files, this should be the file which has metadata included.
+ """
+ bundled_libs = set()
+ for filename in fnmatch.filter(paths, 'lib/ansible/compat/*/__init__.py'):
+ bundled_libs.add(filename)
+
+ bundled_libs.add('lib/ansible/module_utils/compat/selectors.py')
+ bundled_libs.add('lib/ansible/module_utils/distro/__init__.py')
+ bundled_libs.add('lib/ansible/module_utils/six/__init__.py')
+ # backports.ssl_match_hostname should be moved to its own file in the future
+ bundled_libs.add('lib/ansible/module_utils/urls.py')
+
+ return bundled_libs
+
+
+def get_files_with_bundled_metadata(paths):
+ """
+ Search for any files which have bundled metadata inside of them
+
+ :arg paths: Iterable of filenames to search for metadata inside of
+ :returns: A set of pathnames which contained metadata
+ """
+
+ with_metadata = set()
+ for path in paths:
+ with open(path, 'rb') as f:
+ body = f.read()
+
+ if BUNDLED_RE.search(body):
+ with_metadata.add(path)
+
+ return with_metadata
+
+
+def get_bundled_metadata(filename):
+ """
+ Retrieve the metadata about a bundled library from a python file
+
+ :arg filename: The filename to look inside for the metadata
+ :raises ValueError: If we're unable to extract metadata from the file
+ :returns: The metadata from the python file
+ """
+ with open(filename, 'r') as module:
+ for line in module:
+ if line.strip().startswith('# NOT_BUNDLED'):
+ return None
+
+ if line.strip().startswith('# CANT_UPDATE'):
+ print(
+ '{0} marked as CANT_UPDATE, so skipping. Manual '
+ 'check for CVEs required.'.format(filename))
+ return None
+
+ if line.strip().startswith('_BUNDLED_METADATA'):
+ data = line[line.index('{'):].strip()
+ break
+ else:
+ raise ValueError('Unable to check bundled library for update. Please add'
+ ' _BUNDLED_METADATA dictionary to the library file with'
+ ' information on pypi name and bundled version.')
+ metadata = json.loads(data)
+ return metadata
+
+
+def get_latest_applicable_version(pypi_data, constraints=None):
+ """Get the latest pypi version of the package that we allow
+
+ :arg pypi_data: Pypi information about the data as returned by
+ ``https://pypi.org/pypi/{pkg_name}/json``
+ :kwarg constraints: version constraints on what we're allowed to use as specified by
+ the bundled metadata
+ :returns: The most recent version on pypi that is allowed by ``constraints``
+ """
+ latest_version = "0"
+ if constraints:
+ version_specification = packaging.specifiers.SpecifierSet(constraints)
+ for version in pypi_data['releases']:
+ if version in version_specification:
+ if LooseVersion(version) > LooseVersion(latest_version):
+ latest_version = version
+ else:
+ latest_version = pypi_data['info']['version']
+
+ return latest_version
+
+
+def main():
+ """Entrypoint to the script"""
+
+ paths = sys.argv[1:] or sys.stdin.read().splitlines()
+
+ bundled_libs = get_bundled_libs(paths)
+ files_with_bundled_metadata = get_files_with_bundled_metadata(paths)
+
+ for filename in files_with_bundled_metadata.difference(bundled_libs):
+ if filename.startswith('test/support/'):
+ continue # bundled support code does not need to be updated or tracked
+
+ print('{0}: ERROR: File contains _BUNDLED_METADATA but needs to be added to'
+ ' test/sanity/code-smell/update-bundled.py'.format(filename))
+
+ for filename in bundled_libs:
+ try:
+ metadata = get_bundled_metadata(filename)
+ except ValueError as e:
+ print('{0}: ERROR: {1}'.format(filename, e))
+ continue
+ except (IOError, OSError) as e:
+ if e.errno == 2:
+ print('{0}: ERROR: {1}. Perhaps the bundled library has been removed'
+ ' or moved and the bundled library test needs to be modified as'
+ ' well?'.format(filename, e))
+
+ if metadata is None:
+ continue
+
+ pypi_fh = open_url('https://pypi.org/pypi/{0}/json'.format(metadata['pypi_name']))
+ pypi_data = json.loads(pypi_fh.read().decode('utf-8'))
+
+ constraints = metadata.get('version_constraints', None)
+ latest_version = get_latest_applicable_version(pypi_data, constraints)
+
+ if LooseVersion(metadata['version']) < LooseVersion(latest_version):
+ print('{0}: UPDATE {1} from {2} to {3} {4}'.format(
+ filename,
+ metadata['pypi_name'],
+ metadata['version'],
+ latest_version,
+ 'https://pypi.org/pypi/{0}/json'.format(metadata['pypi_name'])))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/sanity/code-smell/update-bundled.requirements.in b/test/sanity/code-smell/update-bundled.requirements.in
new file mode 100644
index 0000000..748809f
--- /dev/null
+++ b/test/sanity/code-smell/update-bundled.requirements.in
@@ -0,0 +1 @@
+packaging
diff --git a/test/sanity/code-smell/update-bundled.requirements.txt b/test/sanity/code-smell/update-bundled.requirements.txt
new file mode 100644
index 0000000..d9785e7
--- /dev/null
+++ b/test/sanity/code-smell/update-bundled.requirements.txt
@@ -0,0 +1,3 @@
+# edit "update-bundled.requirements.in" and generate with: hacking/update-sanity-requirements.py --test update-bundled
+packaging==21.3
+pyparsing==3.0.9