author     Daniel Baumann <daniel.baumann@progress-linux.org>    2024-05-14 20:03:01 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>    2024-05-14 20:03:01 +0000
commit     a453ac31f3428614cceb99027f8efbdb9258a40b
tree       f61f87408f32a8511cbd91799f9cececb53e0374 /test/sanity/code-smell
parent     Initial commit.
Adding upstream version 2.10.7+merged+base+2.10.8+dfsg.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'test/sanity/code-smell')
26 files changed, 1115 insertions, 0 deletions
diff --git a/test/sanity/code-smell/configure-remoting-ps1.json b/test/sanity/code-smell/configure-remoting-ps1.json
new file mode 100644
index 00000000..593b765d
--- /dev/null
+++ b/test/sanity/code-smell/configure-remoting-ps1.json
@@ -0,0 +1,4 @@
+{
+    "no_targets": true,
+    "output": "path-message"
+}
diff --git a/test/sanity/code-smell/configure-remoting-ps1.py b/test/sanity/code-smell/configure-remoting-ps1.py
new file mode 100755
index 00000000..51dff20c
--- /dev/null
+++ b/test/sanity/code-smell/configure-remoting-ps1.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+
+
+def main():
+    # required by external automated processes and should not be moved, renamed or converted to a symbolic link
+    original = 'examples/scripts/ConfigureRemotingForAnsible.ps1'
+    # required to be packaged with ansible-test and must match the original file, but cannot be a symbolic link
+    # the packaged version is needed to run tests when ansible-test has been installed
+    # keeping the packaged version identical to the original makes sure tests cover both files
+    packaged = 'test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1'
+
+    copy_valid = False
+
+    if os.path.isfile(original) and os.path.isfile(packaged):
+        with open(original, 'rb') as original_file:
+            original_content = original_file.read()
+
+        with open(packaged, 'rb') as packaged_file:
+            packaged_content = packaged_file.read()
+
+        if original_content == packaged_content:
+            copy_valid = True
+
+    if not copy_valid:
+        print('%s: must be an exact copy of "%s"' % (packaged, original))
+
+    for path in [original, packaged]:
+        directory = path
+
+        while True:
+            directory = os.path.dirname(directory)
+
+            if not directory:
+                break
+
+            if not os.path.isdir(directory):
+                print('%s: must be a directory' % directory)
+
+            if os.path.islink(directory):
+                print('%s: cannot be a symbolic link' % directory)
+
+        if not os.path.isfile(path):
+            print('%s: must be a file' % path)
+
+        if os.path.islink(path):
+            print('%s: cannot be a symbolic link' % path)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/test/sanity/code-smell/deprecated-config.json b/test/sanity/code-smell/deprecated-config.json
new file mode 100644
index 00000000..4a884860
--- /dev/null
+++ b/test/sanity/code-smell/deprecated-config.json
@@ -0,0 +1,10 @@
+{
+    "all_targets": true,
+    "output": "path-message",
+    "extensions": [
+        ".py"
+    ],
+    "prefixes": [
+        "lib/ansible/"
+    ]
+}
diff --git a/test/sanity/code-smell/deprecated-config.py b/test/sanity/code-smell/deprecated-config.py
new file mode 100755
index 00000000..08e93c36
--- /dev/null
+++ b/test/sanity/code-smell/deprecated-config.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# (c) 2018, Matt Martz <matt@sivel.net>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import mmap
+import os
+import re
+import sys
+
+from distutils.version import StrictVersion
+
+import yaml
+
+import ansible.config
+
+from ansible.plugins.loader import fragment_loader
+from ansible.release import __version__ as ansible_version
+from ansible.utils.plugin_docs import get_docstring
+
+DOC_RE = re.compile(b'^DOCUMENTATION', flags=re.M)
+ANSIBLE_MAJOR = StrictVersion('.'.join(ansible_version.split('.')[:2]))
+
+
+def find_deprecations(obj, path=None):
+    if not isinstance(obj, (list, dict)):
+        return
+
+    try:
+        items = obj.items()
+    except AttributeError:
+        items = enumerate(obj)
+
+    for key, value in items:
+        if path is None:
+            this_path = []
+        else:
+            this_path = path[:]
+
+        this_path.append(key)
+
+        if key != 'deprecated':
+            for result in find_deprecations(value, path=this_path):
+                yield result
+        else:
+            try:
+                version = value['version']
+                this_path.append('version')
+            except KeyError:
+                version = value['removed_in']
+                this_path.append('removed_in')
+            if StrictVersion(version) <= ANSIBLE_MAJOR:
+                yield (this_path, version)
+
+
+def main():
+    plugins = []
+    for path in sys.argv[1:] or sys.stdin.read().splitlines():
+        with open(path, 'rb') as f:
+            try:
+                mm_file = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
+            except ValueError:
+                continue
+            if DOC_RE.search(mm_file):
+                plugins.append(path)
+            mm_file.close()
+
+    for plugin in plugins:
+        data = {}
+        data['doc'], data['examples'], data['return'], data['metadata'] = get_docstring(plugin, fragment_loader)
+        for result in find_deprecations(data['doc']):
+            print(
+                '%s: %s is scheduled for removal in %s' % (plugin, '.'.join(str(i) for i in result[0][:-2]), result[1])
+            )
+
+    base = os.path.join(os.path.dirname(ansible.config.__file__), 'base.yml')
+    with open(base) as f:
+        data = yaml.safe_load(f)
+
+    for result in find_deprecations(data):
+        print('%s: %s is scheduled for removal in %s' % (base, '.'.join(str(i) for i in result[0][:-2]), result[1]))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/test/sanity/code-smell/deprecated-config.requirements.txt b/test/sanity/code-smell/deprecated-config.requirements.txt
new file mode 100644
index 00000000..cfefdeec
--- /dev/null
+++ b/test/sanity/code-smell/deprecated-config.requirements.txt
@@ -0,0 +1,2 @@
+jinja2 # ansible-base requirement
+pyyaml
diff --git a/test/sanity/code-smell/docs-build.json b/test/sanity/code-smell/docs-build.json
new file mode 100644
index 00000000..0218bfc5
--- /dev/null
+++ b/test/sanity/code-smell/docs-build.json
@@ -0,0 +1,6 @@
+{
+    "intercept": true,
+    "disabled": true,
+    "no_targets": true,
+    "output": "path-line-column-message"
+}
diff --git a/test/sanity/code-smell/docs-build.py b/test/sanity/code-smell/docs-build.py
new file mode 100755
index 00000000..80eca15f
--- /dev/null
+++ b/test/sanity/code-smell/docs-build.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+def main():
+    base_dir = os.getcwd() + os.path.sep
+    docs_dir = os.path.abspath('docs/docsite')
+
+    # TODO: Remove this temporary hack to constrain 'cryptography' when we have
+    # a better story for dealing with it.
+    tmpfd, tmp = tempfile.mkstemp()
+    requirements_txt = os.path.join(base_dir, 'requirements.txt')
+    shutil.copy2(requirements_txt, tmp)
+    lines = []
+    with open(requirements_txt, 'r') as f:
+        for line in f.readlines():
+            if line.strip() == 'cryptography':
+                line = 'cryptography < 3.4\n'
+            lines.append(line)
+
+    with open(requirements_txt, 'w') as f:
+        f.writelines(lines)
+
+    try:
+        cmd = ['make', 'core_singlehtmldocs']
+        sphinx = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=docs_dir)
+        stdout, stderr = sphinx.communicate()
+    finally:
+        shutil.move(tmp, requirements_txt)
+
+    stdout = stdout.decode('utf-8')
+    stderr = stderr.decode('utf-8')
+
+    if sphinx.returncode != 0:
+        sys.stderr.write("Command '%s' failed with status code: %d\n" % (' '.join(cmd), sphinx.returncode))
+
+        if stdout.strip():
+            stdout = simplify_stdout(stdout)
+
+            sys.stderr.write("--> Standard Output\n")
+            sys.stderr.write("%s\n" % stdout.strip())
+
+        if stderr.strip():
+            sys.stderr.write("--> Standard Error\n")
+            sys.stderr.write("%s\n" % stderr.strip())
+
+        sys.exit(1)
+
+    with open('docs/docsite/rst_warnings', 'r') as warnings_fd:
+        output = warnings_fd.read().strip()
+        lines = output.splitlines()
+
+    known_warnings = {
+        'block-quote-missing-blank-line': r'^Block quote ends without a blank line; unexpected unindent.$',
+        'literal-block-lex-error': r'^Could not lex literal_block as "[^"]*". Highlighting skipped.$',
+        'duplicate-label': r'^duplicate label ',
+        'undefined-label': r'undefined label: ',
+        'unknown-document': r'unknown document: ',
+        'toc-tree-missing-document': r'toctree contains reference to nonexisting document ',
+        'reference-target-not-found': r'[^ ]* reference target not found: ',
+        'not-in-toc-tree': r"document isn't included in any toctree$",
+        'unexpected-indentation': r'^Unexpected indentation.$',
+        'definition-list-missing-blank-line': r'^Definition list ends without a blank line; unexpected unindent.$',
+        'explicit-markup-missing-blank-line': r'Explicit markup ends without a blank line; unexpected unindent.$',
+        'toc-tree-glob-pattern-no-match': r"^toctree glob pattern '[^']*' didn't match any documents$",
+        'unknown-interpreted-text-role': '^Unknown interpreted text role "[^"]*".$',
+    }
+
+    for line in lines:
+        match = re.search('^(?P<path>[^:]+):((?P<line>[0-9]+):)?((?P<column>[0-9]+):)? (?P<level>WARNING|ERROR): (?P<message>.*)$', line)
+
+        if not match:
+            path = 'docs/docsite/rst/index.rst'
+            lineno = 0
+            column = 0
+            code = 'unknown'
+            message = line
+
+            # surface unknown lines while filtering out known lines to avoid excessive output
+            print('%s:%d:%d: %s: %s' % (path, lineno, column, code, message))
+            continue
+
+        path = match.group('path')
+        lineno = int(match.group('line') or 0)
+        column = int(match.group('column') or 0)
+        level = match.group('level').lower()
+        message = match.group('message')
+
+        path = os.path.abspath(path)
+
+        if path.startswith(base_dir):
+            path = path[len(base_dir):]
+
+        if path.startswith('rst/'):
+            path = 'docs/docsite/' + path  # fix up paths reported relative to `docs/docsite/`
+
+        if level == 'warning':
+            code = 'warning'
+
+            for label, pattern in known_warnings.items():
+                if re.search(pattern, message):
+                    code = label
+                    break
+        else:
+            code = 'error'
+
+        print('%s:%d:%d: %s: %s' % (path, lineno, column, code, message))
+
+
+def simplify_stdout(value):
+    """Simplify output by omitting earlier 'rendering: ...' messages."""
+    lines = value.strip().splitlines()
+
+    rendering = []
+    keep = []
+
+    def truncate_rendering():
+        """Keep last rendering line (if any) with a message about omitted lines as needed."""
+        if not rendering:
+            return
+
+        notice = rendering[-1]
+
+        if len(rendering) > 1:
+            notice += ' (%d previous rendering line(s) omitted)' % (len(rendering) - 1)
+
+        keep.append(notice)
+        # Could change to rendering.clear() if we do not support python2
+        rendering[:] = []
+
+    for line in lines:
+        if line.startswith('rendering: '):
+            rendering.append(line)
+            continue
+
+        truncate_rendering()
+        keep.append(line)
+
+    truncate_rendering()
+
+    result = '\n'.join(keep)
+
+    return result
+
+
+if __name__ == '__main__':
+    main()
diff --git a/test/sanity/code-smell/docs-build.requirements.txt b/test/sanity/code-smell/docs-build.requirements.txt
new file mode 100644
index 00000000..5e458795
--- /dev/null
+++ b/test/sanity/code-smell/docs-build.requirements.txt
@@ -0,0 +1,6 @@
+jinja2
+pyyaml
+sphinx
+sphinx-notfound-page
+straight.plugin
+antsibull
diff --git a/test/sanity/code-smell/no-unwanted-files.json b/test/sanity/code-smell/no-unwanted-files.json
new file mode 100644
index 00000000..7a89ebbe
--- /dev/null
+++ b/test/sanity/code-smell/no-unwanted-files.json
@@ -0,0 +1,7 @@
+{
+    "include_symlinks": true,
+    "prefixes": [
+        "lib/"
+    ],
+    "output": "path-message"
+}
diff --git a/test/sanity/code-smell/no-unwanted-files.py b/test/sanity/code-smell/no-unwanted-files.py
new file mode 100755
index 00000000..bff09152
--- /dev/null
+++ b/test/sanity/code-smell/no-unwanted-files.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+"""Prevent unwanted files from being added to the source tree."""
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import sys
+
+
+def main():
+    """Main entry point."""
+    paths = sys.argv[1:] or sys.stdin.read().splitlines()
+
+    allowed_extensions = (
+        '.cs',
+        '.ps1',
+        '.psm1',
+        '.py',
+    )
+
+    skip_paths = set([
+        'lib/ansible/config/ansible_builtin_runtime.yml',  # not included in the sanity ignore file since it won't exist until after migration
+    ])
+
+    skip_directories = (
+        'lib/ansible/galaxy/data/',
+    )
+
+    for path in paths:
+        if path in skip_paths:
+            continue
+
+        if any(path.startswith(skip_directory) for skip_directory in skip_directories):
+            continue
+
+        if path.startswith('lib/') and not path.startswith('lib/ansible/'):
+            print('%s: all "lib" content must reside in the "lib/ansible" directory' % path)
+            continue
+
+        ext = os.path.splitext(path)[1]
+
+        if ext not in allowed_extensions:
+            print('%s: extension must be one of: %s' % (path, ', '.join(allowed_extensions)))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/test/sanity/code-smell/obsolete-files.json b/test/sanity/code-smell/obsolete-files.json
new file mode 100644
index 00000000..02d39204
--- /dev/null
+++ b/test/sanity/code-smell/obsolete-files.json
@@ -0,0 +1,17 @@
+{
+    "include_symlinks": true,
+    "prefixes": [
+        "test/runner/",
+        "test/sanity/ansible-doc/",
+        "test/sanity/compile/",
+        "test/sanity/import/",
+        "test/sanity/pep8/",
+        "test/sanity/pslint/",
+        "test/sanity/pylint/",
+        "test/sanity/rstcheck/",
+        "test/sanity/shellcheck/",
+        "test/sanity/validate-modules/",
+        "test/sanity/yamllint/"
+    ],
+    "output": "path-message"
+}
diff --git a/test/sanity/code-smell/obsolete-files.py b/test/sanity/code-smell/obsolete-files.py
new file mode 100755
index 00000000..e9ddc8a5
--- /dev/null
+++ b/test/sanity/code-smell/obsolete-files.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python +"""Prevent files from being added to directories that are now obsolete.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys + + +def main(): + """Main entry point.""" + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + for path in paths: + print('%s: directory "%s/" is obsolete and should not contain any files' % (path, os.path.dirname(path))) + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/package-data.json b/test/sanity/code-smell/package-data.json new file mode 100644 index 00000000..2b8a5326 --- /dev/null +++ b/test/sanity/code-smell/package-data.json @@ -0,0 +1,6 @@ +{ + "intercept": true, + "disabled": true, + "all_targets": true, + "output": "path-message" +} diff --git a/test/sanity/code-smell/package-data.py b/test/sanity/code-smell/package-data.py new file mode 100755 index 00000000..ca5f5ef5 --- /dev/null +++ b/test/sanity/code-smell/package-data.py @@ -0,0 +1,379 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import contextlib +import fnmatch +import glob +import os +import re +import shutil +import subprocess +import sys +import tarfile +import tempfile + + +def assemble_files_to_ship(complete_file_list): + """ + This looks for all files which should be shipped in the sdist + """ + # All files which are in the repository except these: + ignore_patterns = ( + # Developer-only tools + '.azure-pipelines/*', + '.github/*', + '.github/*/*', + 'changelogs/fragments/*', + 'hacking/backport/*', + 'hacking/shippable/*', + 'hacking/tests/*', + 'hacking/ticket_stubs/*', + 'test/sanity/code-smell/botmeta.*', + 'test/utils/*', + 'test/utils/*/*', + 'test/utils/*/*/*', + '.git*', + ) + ignore_files = frozenset(( + # Developer-only tools + 'changelogs/config.yaml', + 'hacking/README.md', + 'hacking/ansible-profile', + 'hacking/cgroup_perf_recap_graph.py', + 'hacking/create_deprecated_issues.py', + 'hacking/deprecated_issue_template.md', + 'hacking/fix_test_syntax.py', + 'hacking/get_library.py', + 'hacking/metadata-tool.py', + 'hacking/report.py', + 'hacking/return_skeleton_generator.py', + 'hacking/test-module', + 'hacking/test-module.py', + 'test/support/README.md', + '.cherry_picker.toml', + '.mailmap', + # Generated as part of a build step + 'docs/docsite/rst/conf.py', + 'docs/docsite/rst/index.rst', + # Possibly should be included + 'examples/scripts/uptime.py', + 'examples/scripts/my_test.py', + 'examples/scripts/my_test_info.py', + 'examples/scripts/my_test_facts.py', + 'examples/DOCUMENTATION.yml', + 'examples/play.yml', + 'examples/hosts.yaml', + 'examples/hosts.yml', + 'examples/inventory_script_schema.json', + 'examples/plugin_filters.yml', + 'hacking/env-setup', + 'hacking/env-setup.fish', + 'MANIFEST', + )) + + # These files are generated and then intentionally added to the sdist + + # Manpages + manpages = ['docs/man/man1/ansible.1'] + for dirname, dummy, files in os.walk('bin'): + for filename in files: + path = os.path.join(dirname, filename) + if os.path.islink(path): + if os.readlink(path) == 'ansible': + manpages.append('docs/man/man1/%s.1' % filename) + + # Misc + misc_generated_files = [ + 'SYMLINK_CACHE.json', + 'PKG-INFO', + ] + + shipped_files = manpages + misc_generated_files + + for path in complete_file_list: + if path not in ignore_files: + for ignore in ignore_patterns: + if fnmatch.fnmatch(path, ignore): + break + else: + shipped_files.append(path) + + return 
shipped_files + + +def assemble_files_to_install(complete_file_list): + """ + This looks for all of the files which should show up in an installation of ansible + """ + ignore_patterns = tuple() + + pkg_data_files = [] + for path in complete_file_list: + + if path.startswith("lib/ansible"): + prefix = 'lib' + elif path.startswith("test/lib/ansible_test"): + prefix = 'test/lib' + else: + continue + + for ignore in ignore_patterns: + if fnmatch.fnmatch(path, ignore): + break + else: + pkg_data_files.append(os.path.relpath(path, prefix)) + + return pkg_data_files + + +@contextlib.contextmanager +def clean_repository(file_list): + """Copy the repository to clean it of artifacts""" + # Create a tempdir that will be the clean repo + with tempfile.TemporaryDirectory() as repo_root: + directories = set((repo_root + os.path.sep,)) + + for filename in file_list: + # Determine if we need to create the directory + directory = os.path.dirname(filename) + dest_dir = os.path.join(repo_root, directory) + if dest_dir not in directories: + os.makedirs(dest_dir) + + # Keep track of all the directories that now exist + path_components = directory.split(os.path.sep) + path = repo_root + for component in path_components: + path = os.path.join(path, component) + if path not in directories: + directories.add(path) + + # Copy the file + shutil.copy2(filename, dest_dir, follow_symlinks=False) + + yield repo_root + + +def create_sdist(tmp_dir): + """Create an sdist in the repository""" + create = subprocess.Popen( + ['make', 'snapshot', 'SDIST_DIR=%s' % tmp_dir], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + ) + + stderr = create.communicate()[1] + + if create.returncode != 0: + raise Exception('make snapshot failed:\n%s' % stderr) + + # Determine path to sdist + tmp_dir_files = os.listdir(tmp_dir) + + if not tmp_dir_files: + raise Exception('sdist was not created in the temp dir') + elif len(tmp_dir_files) > 1: + raise Exception('Unexpected extra files in the temp dir') + + return os.path.join(tmp_dir, tmp_dir_files[0]) + + +def extract_sdist(sdist_path, tmp_dir): + """Untar the sdist""" + # Untar the sdist from the tmp_dir + with tarfile.open(os.path.join(tmp_dir, sdist_path), 'r|*') as sdist: + sdist.extractall(path=tmp_dir) + + # Determine the sdist directory name + sdist_filename = os.path.basename(sdist_path) + tmp_dir_files = os.listdir(tmp_dir) + try: + tmp_dir_files.remove(sdist_filename) + except ValueError: + # Unexpected could not find original sdist in temp dir + raise + + if len(tmp_dir_files) > 1: + raise Exception('Unexpected extra files in the temp dir') + elif len(tmp_dir_files) < 1: + raise Exception('sdist extraction did not occur i nthe temp dir') + + return os.path.join(tmp_dir, tmp_dir_files[0]) + + +def install_sdist(tmp_dir, sdist_dir): + """Install the extracted sdist into the temporary directory""" + install = subprocess.Popen( + ['python', 'setup.py', 'install', '--root=%s' % tmp_dir], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + cwd=os.path.join(tmp_dir, sdist_dir), + ) + + stdout, stderr = install.communicate() + + if install.returncode != 0: + raise Exception('sdist install failed:\n%s' % stderr) + + # Determine the prefix for the installed files + match = re.search('^creating (%s/.*?/(?:site|dist)-packages)/ansible$' % + tmp_dir, stdout, flags=re.M) + return match.group(1) + + +def check_sdist_contains_expected(sdist_dir, to_ship_files): + """Check that the files we expect to ship are present in the sdist""" + 
results = [] + for filename in to_ship_files: + path = os.path.join(sdist_dir, filename) + if not os.path.exists(path): + results.append('%s: File was not added to sdist' % filename) + + # Also changelog + changelog_files = glob.glob(os.path.join(sdist_dir, 'changelogs/CHANGELOG-v2.[0-9]*.rst')) + if not changelog_files: + results.append('changelogs/CHANGELOG-v2.*.rst: Changelog file was not added to the sdist') + elif len(changelog_files) > 1: + results.append('changelogs/CHANGELOG-v2.*.rst: Too many changelog files: %s' + % changelog_files) + + return results + + +def check_sdist_files_are_wanted(sdist_dir, to_ship_files): + """Check that all files in the sdist are desired""" + results = [] + for dirname, dummy, files in os.walk(sdist_dir): + dirname = os.path.relpath(dirname, start=sdist_dir) + if dirname == '.': + dirname = '' + + for filename in files: + path = os.path.join(dirname, filename) + if path not in to_ship_files: + if fnmatch.fnmatch(path, 'changelogs/CHANGELOG-v2.[0-9]*.rst'): + # changelog files are expected + continue + + # FIXME: ansible-test doesn't pass the paths of symlinks to us so we aren't + # checking those + if not os.path.islink(os.path.join(sdist_dir, path)): + results.append('%s: File in sdist was not in the repository' % path) + + return results + + +def check_installed_contains_expected(install_dir, to_install_files): + """Check that all the files we expect to be installed are""" + results = [] + for filename in to_install_files: + path = os.path.join(install_dir, filename) + if not os.path.exists(path): + results.append('%s: File not installed' % os.path.join('lib', filename)) + + return results + + +EGG_RE = re.compile('ansible[^/]+\\.egg-info/(PKG-INFO|SOURCES.txt|' + 'dependency_links.txt|not-zip-safe|requires.txt|top_level.txt)$') + + +def check_installed_files_are_wanted(install_dir, to_install_files): + """Check that all installed files were desired""" + results = [] + + for dirname, dummy, files in os.walk(install_dir): + dirname = os.path.relpath(dirname, start=install_dir) + if dirname == '.': + dirname = '' + + for filename in files: + # If this is a byte code cache, look for the python file's name + directory = dirname + if filename.endswith('.pyc') or filename.endswith('.pyo'): + # Remove the trailing "o" or c" + filename = filename[:-1] + + if directory.endswith('%s__pycache__' % os.path.sep): + # Python3 byte code cache, look for the basename of + # __pycache__/__init__.cpython-36.py + segments = filename.rsplit('.', 2) + if len(segments) >= 3: + filename = '.'.join((segments[0], segments[2])) + directory = os.path.dirname(directory) + + path = os.path.join(directory, filename) + + # Test that the file was listed for installation + if path not in to_install_files: + # FIXME: ansible-test doesn't pass the paths of symlinks to us so we + # aren't checking those + if not os.path.islink(os.path.join(install_dir, path)): + if not EGG_RE.match(path): + results.append('%s: File was installed but was not supposed to be' % path) + + return results + + +def _find_symlinks(): + symlink_list = [] + for dirname, directories, filenames in os.walk('.'): + for filename in filenames: + path = os.path.join(dirname, filename) + # Strip off "./" from the front + path = path[2:] + if os.path.islink(path): + symlink_list.append(path) + + return symlink_list + + +def main(): + """All of the files in the repository""" + complete_file_list = [] + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + complete_file_list.append(path) + + # ansible-test isn't 
currently passing symlinks to us so construct those ourselves for now + for filename in _find_symlinks(): + if filename not in complete_file_list: + # For some reason ansible-test is passing us lib/ansible/module_utils/ansible_release.py + # which is a symlink even though it doesn't pass any others + complete_file_list.append(filename) + + # We may run this after docs sanity tests so get a clean repository to run in + with clean_repository(complete_file_list) as clean_repo_dir: + os.chdir(clean_repo_dir) + + to_ship_files = assemble_files_to_ship(complete_file_list) + to_install_files = assemble_files_to_install(complete_file_list) + + results = [] + with tempfile.TemporaryDirectory() as tmp_dir: + sdist_path = create_sdist(tmp_dir) + sdist_dir = extract_sdist(sdist_path, tmp_dir) + + # Check that the files that are supposed to be in the sdist are there + results.extend(check_sdist_contains_expected(sdist_dir, to_ship_files)) + + # Check that the files that are in the sdist are in the repository + results.extend(check_sdist_files_are_wanted(sdist_dir, to_ship_files)) + + # install the sdist + install_dir = install_sdist(tmp_dir, sdist_dir) + + # Check that the files that are supposed to be installed are there + results.extend(check_installed_contains_expected(install_dir, to_install_files)) + + # Check that the files that are installed are supposed to be installed + results.extend(check_installed_files_are_wanted(install_dir, to_install_files)) + + for message in results: + print(message) + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/package-data.requirements.txt b/test/sanity/code-smell/package-data.requirements.txt new file mode 100644 index 00000000..5d74c715 --- /dev/null +++ b/test/sanity/code-smell/package-data.requirements.txt @@ -0,0 +1,10 @@ +docutils +jinja2 +packaging +pyyaml # ansible-base requirement +rstcheck +setuptools > 39.2 +straight.plugin + +# changelog build requires python 3.6+ +antsibull-changelog ; python_version >= '3.6' diff --git a/test/sanity/code-smell/release-names.json b/test/sanity/code-smell/release-names.json new file mode 100644 index 00000000..593b765d --- /dev/null +++ b/test/sanity/code-smell/release-names.json @@ -0,0 +1,4 @@ +{ + "no_targets": true, + "output": "path-message" +} diff --git a/test/sanity/code-smell/release-names.py b/test/sanity/code-smell/release-names.py new file mode 100755 index 00000000..f8003320 --- /dev/null +++ b/test/sanity/code-smell/release-names.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# (c) 2019, Ansible Project +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
+""" +Test that the release name is present in the list of used up release names +""" + + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from yaml import safe_load + +from ansible.release import __codename__ + + +def main(): + """Entrypoint to the script""" + + with open('.github/RELEASE_NAMES.yml') as f: + releases = safe_load(f.read()) + + # Why this format? The file's sole purpose is to be read by a human when they need to know + # which release names have already been used. So: + # 1) It's easier for a human to find the release names when there's one on each line + # 2) It helps keep other people from using the file and then asking for new features in it + for name in (r.split(maxsplit=1)[1] for r in releases): + if __codename__ == name: + break + else: + print('.github/RELEASE_NAMES.yml: Current codename was not present in the file') + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/release-names.requirements.txt b/test/sanity/code-smell/release-names.requirements.txt new file mode 100644 index 00000000..c3726e8b --- /dev/null +++ b/test/sanity/code-smell/release-names.requirements.txt @@ -0,0 +1 @@ +pyyaml diff --git a/test/sanity/code-smell/required-and-default-attributes.json b/test/sanity/code-smell/required-and-default-attributes.json new file mode 100644 index 00000000..dd9ac7b1 --- /dev/null +++ b/test/sanity/code-smell/required-and-default-attributes.json @@ -0,0 +1,9 @@ +{ + "prefixes": [ + "lib/ansible/" + ], + "extensions": [ + ".py" + ], + "output": "path-line-column-message" +} diff --git a/test/sanity/code-smell/required-and-default-attributes.py b/test/sanity/code-smell/required-and-default-attributes.py new file mode 100755 index 00000000..5ef410bd --- /dev/null +++ b/test/sanity/code-smell/required-and-default-attributes.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'(FieldAttribute.*(default|required).*(default|required))', text) + + if match: + print('%s:%d:%d: use only one of `default` or `required` with `FieldAttribute`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/skip.txt b/test/sanity/code-smell/skip.txt new file mode 100644 index 00000000..6fb327b3 --- /dev/null +++ b/test/sanity/code-smell/skip.txt @@ -0,0 +1,2 @@ +deprecated-config.py # disabled by default, to be enabled by the release manager after branching +update-bundled.py # disabled by default, to be enabled by the release manager after branching diff --git a/test/sanity/code-smell/test-constraints.json b/test/sanity/code-smell/test-constraints.json new file mode 100644 index 00000000..69b07bf3 --- /dev/null +++ b/test/sanity/code-smell/test-constraints.json @@ -0,0 +1,9 @@ +{ + "prefixes": [ + "test/lib/ansible_test/_data/requirements/" + ], + "extensions": [ + ".txt" + ], + "output": "path-line-column-message" +} diff --git a/test/sanity/code-smell/test-constraints.py b/test/sanity/code-smell/test-constraints.py new file mode 100755 index 00000000..e8b9c795 --- /dev/null +++ b/test/sanity/code-smell/test-constraints.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re 
+import sys
+
+
+def main():
+    for path in sys.argv[1:] or sys.stdin.read().splitlines():
+        with open(path, 'r') as path_fd:
+            for line, text in enumerate(path_fd.readlines()):
+                match = re.search(r'^[^;#]*?([<>=])(?!.*sanity_ok.*)', text)
+
+                if match:
+                    print('%s:%d:%d: put constraints in `test/lib/ansible_test/_data/requirements/constraints.txt`' % (
+                        path, line + 1, match.start(1) + 1))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/test/sanity/code-smell/update-bundled.json b/test/sanity/code-smell/update-bundled.json
new file mode 100644
index 00000000..379bf4d7
--- /dev/null
+++ b/test/sanity/code-smell/update-bundled.json
@@ -0,0 +1,8 @@
+{
+    "all_targets": true,
+    "ignore_self": true,
+    "extensions": [
+        ".py"
+    ],
+    "output": "path-message"
+}
diff --git a/test/sanity/code-smell/update-bundled.py b/test/sanity/code-smell/update-bundled.py
new file mode 100755
index 00000000..121e225f
--- /dev/null
+++ b/test/sanity/code-smell/update-bundled.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# (c) 2018, Ansible Project
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+"""
+This test checks whether the libraries we're bundling are out of date and need to be synced with
+a newer upstream release.
+"""
+
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import fnmatch
+import json
+import re
+import sys
+from distutils.version import LooseVersion
+
+import packaging.specifiers
+
+from ansible.module_utils.urls import open_url
+
+
+BUNDLED_RE = re.compile(b'\\b_BUNDLED_METADATA\\b')
+
+
+def get_bundled_libs(paths):
+    """
+    Return the set of known bundled libraries
+
+    :arg paths: The paths which the test has been instructed to check
+    :returns: The list of all files which we know to contain bundled libraries. If a bundled
+        library consists of multiple files, this should be the file which has metadata included.
+ """ + bundled_libs = set() + for filename in fnmatch.filter(paths, 'lib/ansible/compat/*/__init__.py'): + bundled_libs.add(filename) + + bundled_libs.add('lib/ansible/module_utils/distro/__init__.py') + bundled_libs.add('lib/ansible/module_utils/six/__init__.py') + bundled_libs.add('lib/ansible/module_utils/compat/ipaddress.py') + # backports.ssl_match_hostname should be moved to its own file in the future + bundled_libs.add('lib/ansible/module_utils/urls.py') + + return bundled_libs + + +def get_files_with_bundled_metadata(paths): + """ + Search for any files which have bundled metadata inside of them + + :arg paths: Iterable of filenames to search for metadata inside of + :returns: A set of pathnames which contained metadata + """ + + with_metadata = set() + for path in paths: + with open(path, 'rb') as f: + body = f.read() + + if BUNDLED_RE.search(body): + with_metadata.add(path) + + return with_metadata + + +def get_bundled_metadata(filename): + """ + Retrieve the metadata about a bundled library from a python file + + :arg filename: The filename to look inside for the metadata + :raises ValueError: If we're unable to extract metadata from the file + :returns: The metadata from the python file + """ + with open(filename, 'r') as module: + for line in module: + if line.strip().startswith('_BUNDLED_METADATA'): + data = line[line.index('{'):].strip() + break + else: + raise ValueError('Unable to check bundled library for update. Please add' + ' _BUNDLED_METADATA dictionary to the library file with' + ' information on pypi name and bundled version.') + metadata = json.loads(data) + return metadata + + +def get_latest_applicable_version(pypi_data, constraints=None): + """Get the latest pypi version of the package that we allow + + :arg pypi_data: Pypi information about the data as returned by + ``https://pypi.org/pypi/{pkg_name}/json`` + :kwarg constraints: version constraints on what we're allowed to use as specified by + the bundled metadata + :returns: The most recent version on pypi that are allowed by ``constraints`` + """ + latest_version = "0" + if constraints: + version_specification = packaging.specifiers.SpecifierSet(constraints) + for version in pypi_data['releases']: + if version in version_specification: + if LooseVersion(version) > LooseVersion(latest_version): + latest_version = version + else: + latest_version = pypi_data['info']['version'] + + return latest_version + + +def main(): + """Entrypoint to the script""" + + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + bundled_libs = get_bundled_libs(paths) + files_with_bundled_metadata = get_files_with_bundled_metadata(paths) + + for filename in files_with_bundled_metadata.difference(bundled_libs): + print('{0}: ERROR: File contains _BUNDLED_METADATA but needs to be added to' + ' test/sanity/code-smell/update-bundled.py'.format(filename)) + + for filename in bundled_libs: + try: + metadata = get_bundled_metadata(filename) + except ValueError as e: + print('{0}: ERROR: {1}'.format(filename, e)) + continue + except (IOError, OSError) as e: + if e.errno == 2: + print('{0}: ERROR: {1}. 
Perhaps the bundled library has been removed' + ' or moved and the bundled library test needs to be modified as' + ' well?'.format(filename, e)) + + pypi_fh = open_url('https://pypi.org/pypi/{0}/json'.format(metadata['pypi_name'])) + pypi_data = json.loads(pypi_fh.read().decode('utf-8')) + + constraints = metadata.get('version_constraints', None) + latest_version = get_latest_applicable_version(pypi_data, constraints) + + if LooseVersion(metadata['version']) < LooseVersion(latest_version): + print('{0}: UPDATE {1} from {2} to {3} {4}'.format( + filename, + metadata['pypi_name'], + metadata['version'], + latest_version, + 'https://pypi.org/pypi/{0}/json'.format(metadata['pypi_name']))) + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/update-bundled.requirements.txt b/test/sanity/code-smell/update-bundled.requirements.txt new file mode 100644 index 00000000..748809f7 --- /dev/null +++ b/test/sanity/code-smell/update-bundled.requirements.txt @@ -0,0 +1 @@ +packaging |