author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-05-14 20:03:01 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-05-14 20:03:01 +0000
commit     a453ac31f3428614cceb99027f8efbdb9258a40b (patch)
tree       f61f87408f32a8511cbd91799f9cececb53e0374 /test/lib/ansible_test/_data/sanity
parent     Initial commit. (diff)
download   ansible-a453ac31f3428614cceb99027f8efbdb9258a40b.tar.xz
           ansible-a453ac31f3428614cceb99027f8efbdb9258a40b.zip
Adding upstream version 2.10.7+merged+base+2.10.8+dfsg.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'test/lib/ansible_test/_data/sanity')
72 files changed, 6377 insertions, 0 deletions
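
Aside from the diff itself, it helps to know the convention that every code-smell checker added below follows: the files under test arrive as command-line arguments or, failing that, one path per line on stdin, and findings are printed to stdout in the format named by the "output" key of the matching .json config. A minimal sketch of that shared skeleton follows; the tab rule in it is a hypothetical placeholder for illustration only, not one of the checks in this commit:

#!/usr/bin/env python
"""Sketch of the checker skeleton shared by the code-smell tests in this commit."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import sys


def main():
    # ansible-test passes the files under test as arguments, or pipes them on stdin
    for path in sys.argv[1:] or sys.stdin.read().splitlines():
        with open(path, 'r') as path_fd:
            for line, text in enumerate(path_fd.readlines()):
                # hypothetical rule, for illustration only; the real checks below grep
                # for patterns such as `basestring`, `iteritems` or `get_exception`
                if '\t' in text:
                    # findings use the "path-line-column-message" format declared in
                    # each check's .json config
                    print('%s:%d:%d: use spaces instead of tabs' % (path, line + 1, text.index('\t') + 1))


if __name__ == '__main__':
    main()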
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.json b/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.json new file mode 100644 index 00000000..12bbe0d1 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.json @@ -0,0 +1,13 @@ +{ + "all_targets": true, + "prefixes": [ + "lib/ansible/modules/", + "lib/ansible/plugins/action/", + "plugins/modules/", + "plugins/action/" + ], + "extensions": [ + ".py" + ], + "output": "path-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.py b/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.py new file mode 100755 index 00000000..65142e00 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python +"""Test to verify action plugins have an associated module to provide documentation.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys + + +def main(): + """Main entry point.""" + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + module_names = set() + + module_prefixes = { + 'lib/ansible/modules/': True, + 'plugins/modules/': False, + } + + action_prefixes = { + 'lib/ansible/plugins/action/': True, + 'plugins/action/': False, + } + + for path in paths: + full_name = get_full_name(path, module_prefixes) + + if full_name: + module_names.add(full_name) + + for path in paths: + full_name = get_full_name(path, action_prefixes) + + if full_name and full_name not in module_names: + print('%s: action plugin has no matching module to provide documentation' % path) + + +def get_full_name(path, prefixes): + """Return the full name of the plugin at the given path by matching against the given path prefixes, or None if no match is found.""" + for prefix, flat in prefixes.items(): + if path.startswith(prefix): + relative_path = os.path.relpath(path, prefix) + + if flat: + full_name = os.path.basename(relative_path) + else: + full_name = relative_path + + full_name = os.path.splitext(full_name)[0] + + name = os.path.basename(full_name) + + if name == '__init__': + return None + + if name.startswith('_'): + name = name[1:] + + full_name = os.path.join(os.path.dirname(full_name), name).replace(os.path.sep, '.') + + return full_name + + return None + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/changelog.json b/test/lib/ansible_test/_data/sanity/code-smell/changelog.json new file mode 100644 index 00000000..87f223b1 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/changelog.json @@ -0,0 +1,9 @@ +{ + "intercept": true, + "minimum_python_version": "3.6", + "prefixes": [ + "changelogs/config.yaml", + "changelogs/fragments/" + ], + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/changelog.py b/test/lib/ansible_test/_data/sanity/code-smell/changelog.py new file mode 100755 index 00000000..710b10f6 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/changelog.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys +import subprocess + + +def main(): + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + allowed_extensions = ('.yml', '.yaml') + config_path = 'changelogs/config.yaml' + + # config must be detected independent of the file list since the file list only contains 
files under test (changed) + has_config = os.path.exists(config_path) + paths_to_check = [] + for path in paths: + if path == config_path: + continue + + if path.startswith('changelogs/fragments/.'): + if path in ('changelogs/fragments/.keep', 'changelogs/fragments/.gitkeep'): + continue + + print('%s:%d:%d: file must not be a dotfile' % (path, 0, 0)) + continue + + ext = os.path.splitext(path)[1] + + if ext not in allowed_extensions: + print('%s:%d:%d: extension must be one of: %s' % (path, 0, 0, ', '.join(allowed_extensions))) + + paths_to_check.append(path) + + if not has_config: + print('changelogs/config.yaml:0:0: config file does not exist') + return + + if not paths_to_check: + return + + cmd = [sys.executable, '-m', 'antsibull_changelog', 'lint'] + paths_to_check + subprocess.call(cmd) # ignore the return code, rely on the output instead + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/empty-init.json b/test/lib/ansible_test/_data/sanity/code-smell/empty-init.json new file mode 100644 index 00000000..9835f9b6 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/empty-init.json @@ -0,0 +1,14 @@ +{ + "prefixes": [ + "lib/ansible/modules/", + "lib/ansible/module_utils/", + "plugins/modules/", + "plugins/module_utils/", + "test/units/", + "tests/unit/" + ], + "files": [ + "__init__.py" + ], + "output": "path-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/empty-init.py b/test/lib/ansible_test/_data/sanity/code-smell/empty-init.py new file mode 100755 index 00000000..8bcd7f9e --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/empty-init.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + if os.path.getsize(path) > 0: + print('%s: empty __init__.py required' % path) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.json b/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.json new file mode 100644 index 00000000..6f1edb78 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.json @@ -0,0 +1,6 @@ +{ + "extensions": [ + ".py" + ], + "output": "path-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.py b/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.py new file mode 100755 index 00000000..81081eed --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import ast +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'rb') as path_fd: + lines = path_fd.read().splitlines() + + missing = True + if not lines: + # Files are allowed to be empty of everything including boilerplate + missing = False + + for text in lines: + if text in (b'from __future__ import (absolute_import, division, print_function)', + b'from __future__ import absolute_import, division, print_function'): + missing = False + break + + if missing: + with open(path) as file: + contents = file.read() + + # noinspection PyBroadException + try: + node = ast.parse(contents) + + # files consisting of only assignments have no 
need for future import boilerplate + # the only exception would be division during assignment, but we'll overlook that for simplicity + # the most likely case is that of a documentation only python file + if all(isinstance(statement, ast.Assign) for statement in node.body): + missing = False + except Exception: # pylint: disable=broad-except + pass # the compile sanity test will report this error + + if missing: + print('%s: missing: from __future__ import (absolute_import, division, print_function)' % path) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/line-endings.json b/test/lib/ansible_test/_data/sanity/code-smell/line-endings.json new file mode 100644 index 00000000..db5c3c98 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/line-endings.json @@ -0,0 +1,4 @@ +{ + "text": true, + "output": "path-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/line-endings.py b/test/lib/ansible_test/_data/sanity/code-smell/line-endings.py new file mode 100755 index 00000000..1e4212d1 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/line-endings.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'rb') as path_fd: + contents = path_fd.read() + + if b'\r' in contents: + print('%s: use "\\n" for line endings instead of "\\r\\n"' % path) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.json b/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.json new file mode 100644 index 00000000..6f1edb78 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.json @@ -0,0 +1,6 @@ +{ + "extensions": [ + ".py" + ], + "output": "path-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.py b/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.py new file mode 100755 index 00000000..28d06f36 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import ast +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'rb') as path_fd: + lines = path_fd.read().splitlines() + + missing = True + if not lines: + # Files are allowed to be empty of everything including boilerplate + missing = False + + for text in lines: + if text == b'__metaclass__ = type': + missing = False + break + + if missing: + with open(path) as file: + contents = file.read() + + # noinspection PyBroadException + try: + node = ast.parse(contents) + + # files consisting of only assignments have no need for metaclass boilerplate + # the most likely case is that of a documentation only python file + if all(isinstance(statement, ast.Assign) for statement in node.body): + missing = False + except Exception: # pylint: disable=broad-except + pass # the compile sanity test will report this error + + if missing: + print('%s: missing: __metaclass__ = type' % path) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-assert.json b/test/lib/ansible_test/_data/sanity/code-smell/no-assert.json new file mode 100644 index 00000000..ccee80a2 
--- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-assert.json @@ -0,0 +1,10 @@ +{ + "extensions": [ + ".py" + ], + "prefixes": [ + "lib/ansible/", + "plugins/" + ], + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-assert.py b/test/lib/ansible_test/_data/sanity/code-smell/no-assert.py new file mode 100755 index 00000000..78561d96 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-assert.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + +ASSERT_RE = re.compile(r'^\s*assert[^a-z0-9_:]') + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as f: + for i, line in enumerate(f.readlines()): + matches = ASSERT_RE.findall(line) + + if matches: + lineno = i + 1 + colno = line.index('assert') + 1 + print('%s:%d:%d: raise AssertionError instead of: %s' % (path, lineno, colno, matches[0][colno - 1:])) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.json b/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.json new file mode 100644 index 00000000..88858aeb --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.json @@ -0,0 +1,7 @@ +{ + "extensions": [ + ".py" + ], + "ignore_self": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.py b/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.py new file mode 100755 index 00000000..a35650ef --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'(isinstance.*basestring)', text) + + if match: + print('%s:%d:%d: do not use `isinstance(s, basestring)`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.json b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.json new file mode 100644 index 00000000..88858aeb --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.json @@ -0,0 +1,7 @@ +{ + "extensions": [ + ".py" + ], + "ignore_self": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.py b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.py new file mode 100755 index 00000000..e28b24f4 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'(?<! 
six)\.(iteritems)', text) + + if match: + print('%s:%d:%d: use `dict.items` or `ansible.module_utils.six.iteritems` instead of `dict.iteritems`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.json b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.json new file mode 100644 index 00000000..88858aeb --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.json @@ -0,0 +1,7 @@ +{ + "extensions": [ + ".py" + ], + "ignore_self": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.py b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.py new file mode 100755 index 00000000..237ee5b1 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'\.(iterkeys)', text) + + if match: + print('%s:%d:%d: use `dict.keys` or `for key in dict:` instead of `dict.iterkeys`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.json b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.json new file mode 100644 index 00000000..88858aeb --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.json @@ -0,0 +1,7 @@ +{ + "extensions": [ + ".py" + ], + "ignore_self": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.py b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.py new file mode 100755 index 00000000..4bf92ea9 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'(?<! 
six)\.(itervalues)', text) + + if match: + print('%s:%d:%d: use `dict.values` or `ansible.module_utils.six.itervalues` instead of `dict.itervalues`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.json b/test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.json new file mode 100644 index 00000000..88858aeb --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.json @@ -0,0 +1,7 @@ +{ + "extensions": [ + ".py" + ], + "ignore_self": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.py b/test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.py new file mode 100755 index 00000000..c925f5b7 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + basic_allow_once = True + + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'([^a-zA-Z0-9_]get_exception[^a-zA-Z0-9_])', text) + + if match: + if path == 'lib/ansible/module_utils/basic.py' and basic_allow_once: + # basic.py is allowed to import get_exception for backwards compatibility but should not call it anywhere + basic_allow_once = False + continue + + print('%s:%d:%d: do not use `get_exception`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.json b/test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.json new file mode 100644 index 00000000..6f13c86b --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.json @@ -0,0 +1,5 @@ +{ + "include_directories": true, + "include_symlinks": true, + "output": "path-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.py b/test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.py new file mode 100755 index 00000000..99432ea1 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python + +# a script to check for illegal filenames on various Operating Systems. 
The +# main rules are derived from restrictions on Windows +# https://msdn.microsoft.com/en-us/library/aa365247#naming_conventions +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import struct +import sys + +from ansible.module_utils.basic import to_bytes + +ILLEGAL_CHARS = [ + b'<', + b'>', + b':', + b'"', + b'/', + b'\\', + b'|', + b'?', + b'*' +] + [struct.pack("b", i) for i in range(32)] + +ILLEGAL_NAMES = [ + "CON", + "PRN", + "AUX", + "NUL", + "COM1", + "COM2", + "COM3", + "COM4", + "COM5", + "COM6", + "COM7", + "COM8", + "COM9", + "LPT1", + "LPT2", + "LPT3", + "LPT4", + "LPT5", + "LPT6", + "LPT7", + "LPT8", + "LPT9", +] + +ILLEGAL_END_CHARS = [ + '.', + ' ', +] + + +def check_path(path, is_dir=False): + type_name = 'directory' if is_dir else 'file' + file_name = os.path.basename(path.rstrip(os.path.sep)) + name = os.path.splitext(file_name)[0] + + if name.upper() in ILLEGAL_NAMES: + print("%s: illegal %s name %s" % (path, type_name, name.upper())) + + if file_name[-1] in ILLEGAL_END_CHARS: + print("%s: illegal %s name end-char '%s'" % (path, type_name, file_name[-1])) + + bfile = to_bytes(file_name, encoding='utf-8') + for char in ILLEGAL_CHARS: + if char in bfile: + bpath = to_bytes(path, encoding='utf-8') + print("%s: illegal char '%s' in %s name" % (bpath, char, type_name)) + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + check_path(path, is_dir=path.endswith(os.path.sep)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.json b/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.json new file mode 100644 index 00000000..ccee80a2 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.json @@ -0,0 +1,10 @@ +{ + "extensions": [ + ".py" + ], + "prefixes": [ + "lib/ansible/", + "plugins/" + ], + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.py b/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.py new file mode 100755 index 00000000..74a36ecc --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import sys + +MAIN_DISPLAY_IMPORT = 'from __main__ import display' + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as f: + for i, line in enumerate(f.readlines()): + if MAIN_DISPLAY_IMPORT in line: + lineno = i + 1 + colno = line.index(MAIN_DISPLAY_IMPORT) + 1 + print('%s:%d:%d: Display is a singleton, just import and instantiate' % (path, lineno, colno)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.json b/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.json new file mode 100644 index 00000000..54d9fff5 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.json @@ -0,0 +1,5 @@ +{ + "text": true, + "ignore_self": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.py b/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.py new file mode 100755 index 00000000..e44005a5 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +from 
__future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'rb') as path_fd: + for line, text in enumerate(path_fd.readlines()): + try: + text = text.decode('utf-8') + except UnicodeDecodeError as ex: + print('%s:%d:%d: UnicodeDecodeError: %s' % (path, line + 1, ex.start + 1, ex)) + continue + + match = re.search(u'([‘’“”])', text) + + if match: + print('%s:%d:%d: use ASCII quotes `\'` and `"` instead of Unicode quotes' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.json b/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.json new file mode 100644 index 00000000..88858aeb --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.json @@ -0,0 +1,7 @@ +{ + "extensions": [ + ".py" + ], + "ignore_self": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.py b/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.py new file mode 100755 index 00000000..e2201ab1 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'(unicode_literals)', text) + + if match: + print('%s:%d:%d: do not use `unicode_literals`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.json b/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.json new file mode 100644 index 00000000..88858aeb --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.json @@ -0,0 +1,7 @@ +{ + "extensions": [ + ".py" + ], + "ignore_self": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.py b/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.py new file mode 100755 index 00000000..b2de1ba8 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'^(?:[^#]*?)(urlopen)', text) + + if match: + print('%s:%d:%d: use `ansible.module_utils.urls.open_url` instead of `urlopen`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.json b/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.json new file mode 100644 index 00000000..44003ec0 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.json @@ -0,0 +1,11 @@ +{ + "prefixes": [ + "lib/ansible/config/ansible_builtin_runtime.yml", + "meta/routing.yml", + "meta/runtime.yml" + ], + "extensions": [ + ".yml" + ], + 
"output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.py b/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.py new file mode 100755 index 00000000..b986db2b --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.py @@ -0,0 +1,150 @@ +#!/usr/bin/env python +"""Schema validation of ansible-base's ansible_builtin_runtime.yml and collection's meta/runtime.yml""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import datetime +import os +import re +import sys +import yaml + +from voluptuous import Any, MultipleInvalid, PREVENT_EXTRA +from voluptuous import Required, Schema, Invalid +from voluptuous.humanize import humanize_error + +from ansible.module_utils.six import string_types + + +def isodate(value): + """Validate a datetime.date or ISO 8601 date string.""" + # datetime.date objects come from YAML dates, these are ok + if isinstance(value, datetime.date): + return value + # make sure we have a string + msg = 'Expected ISO 8601 date string (YYYY-MM-DD), or YAML date' + if not isinstance(value, string_types): + raise Invalid(msg) + try: + datetime.datetime.strptime(value, '%Y-%m-%d').date() + except ValueError: + raise Invalid(msg) + return value + + +def validate_metadata_file(path): + """Validate explicit runtime metadata file""" + try: + with open(path, 'r') as f_path: + routing = yaml.safe_load(f_path) + except yaml.error.MarkedYAMLError as ex: + print('%s:%d:%d: YAML load failed: %s' % (path, ex.context_mark.line + + 1, ex.context_mark.column + 1, re.sub(r'\s+', ' ', str(ex)))) + return + except Exception as ex: # pylint: disable=broad-except + print('%s:%d:%d: YAML load failed: %s' % + (path, 0, 0, re.sub(r'\s+', ' ', str(ex)))) + return + + # Updates to schema MUST also be reflected in the documentation + # ~https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html + + # plugin_routing schema + + deprecation_tombstoning_schema = Any(Schema( + { + Required('removal_date'): Any(isodate), + 'warning_text': Any(*string_types), + }, + extra=PREVENT_EXTRA + ), Schema( + { + Required('removal_version'): Any(*string_types), + 'warning_text': Any(*string_types), + }, + extra=PREVENT_EXTRA + )) + + plugin_routing_schema = Any( + Schema({ + ('deprecation'): Any(deprecation_tombstoning_schema), + ('tombstone'): Any(deprecation_tombstoning_schema), + ('redirect'): Any(*string_types), + }, extra=PREVENT_EXTRA), + ) + + list_dict_plugin_routing_schema = [{str_type: plugin_routing_schema} + for str_type in string_types] + + plugin_schema = Schema({ + ('action'): Any(None, *list_dict_plugin_routing_schema), + ('become'): Any(None, *list_dict_plugin_routing_schema), + ('cache'): Any(None, *list_dict_plugin_routing_schema), + ('callback'): Any(None, *list_dict_plugin_routing_schema), + ('cliconf'): Any(None, *list_dict_plugin_routing_schema), + ('connection'): Any(None, *list_dict_plugin_routing_schema), + ('doc_fragments'): Any(None, *list_dict_plugin_routing_schema), + ('filter'): Any(None, *list_dict_plugin_routing_schema), + ('httpapi'): Any(None, *list_dict_plugin_routing_schema), + ('inventory'): Any(None, *list_dict_plugin_routing_schema), + ('lookup'): Any(None, *list_dict_plugin_routing_schema), + ('module_utils'): Any(None, *list_dict_plugin_routing_schema), + ('modules'): Any(None, *list_dict_plugin_routing_schema), + ('netconf'): Any(None, *list_dict_plugin_routing_schema), + ('shell'): Any(None, 
*list_dict_plugin_routing_schema), + ('strategy'): Any(None, *list_dict_plugin_routing_schema), + ('terminal'): Any(None, *list_dict_plugin_routing_schema), + ('test'): Any(None, *list_dict_plugin_routing_schema), + ('vars'): Any(None, *list_dict_plugin_routing_schema), + }, extra=PREVENT_EXTRA) + + # import_redirection schema + + import_redirection_schema = Any( + Schema({ + ('redirect'): Any(*string_types), + # import_redirect doesn't currently support deprecation + }, extra=PREVENT_EXTRA) + ) + + list_dict_import_redirection_schema = [{str_type: import_redirection_schema} + for str_type in string_types] + + # top level schema + + schema = Schema({ + # All of these are optional + ('plugin_routing'): Any(plugin_schema), + ('import_redirection'): Any(None, *list_dict_import_redirection_schema), + # requires_ansible: In the future we should validate this with SpecifierSet + ('requires_ansible'): Any(*string_types), + ('action_groups'): dict, + }, extra=PREVENT_EXTRA) + + # Ensure schema is valid + + try: + schema(routing) + except MultipleInvalid as ex: + for error in ex.errors: + # No way to get line/column numbers + print('%s:%d:%d: %s' % (path, 0, 0, humanize_error(routing, error))) + + +def main(): + """Validate runtime metadata""" + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + collection_legacy_file = 'meta/routing.yml' + collection_runtime_file = 'meta/runtime.yml' + + for path in paths: + if path == collection_legacy_file: + print('%s:%d:%d: %s' % (path, 0, 0, ("Should be called '%s'" % collection_runtime_file))) + continue + + validate_metadata_file(path) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/shebang.json b/test/lib/ansible_test/_data/sanity/code-smell/shebang.json new file mode 100644 index 00000000..5648429e --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/shebang.json @@ -0,0 +1,4 @@ +{ + "text": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/shebang.py b/test/lib/ansible_test/_data/sanity/code-smell/shebang.py new file mode 100755 index 00000000..7cf3cf72 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/shebang.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import re +import stat +import sys + + +def main(): + standard_shebangs = set([ + b'#!/bin/bash -eu', + b'#!/bin/bash -eux', + b'#!/bin/sh', + b'#!/usr/bin/env bash', + b'#!/usr/bin/env fish', + b'#!/usr/bin/env pwsh', + b'#!/usr/bin/env python', + b'#!/usr/bin/make -f', + ]) + + integration_shebangs = set([ + b'#!/bin/sh', + b'#!/usr/bin/env bash', + b'#!/usr/bin/env python', + ]) + + module_shebangs = { + '': b'#!/usr/bin/python', + '.py': b'#!/usr/bin/python', + '.ps1': b'#!powershell', + } + + # see https://unicode.org/faq/utf_bom.html#bom1 + byte_order_marks = ( + (b'\x00\x00\xFE\xFF', 'UTF-32 (BE)'), + (b'\xFF\xFE\x00\x00', 'UTF-32 (LE)'), + (b'\xFE\xFF', 'UTF-16 (BE)'), + (b'\xFF\xFE', 'UTF-16 (LE)'), + (b'\xEF\xBB\xBF', 'UTF-8'), + ) + + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'rb') as path_fd: + shebang = path_fd.readline().strip() + mode = os.stat(path).st_mode + executable = (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) & mode + + if not shebang or not shebang.startswith(b'#!'): + if executable: + print('%s:%d:%d: file without shebang should not be executable' % (path, 0, 0)) + + for mark, name in byte_order_marks: 
+ if shebang.startswith(mark): + print('%s:%d:%d: file starts with a %s byte order mark' % (path, 0, 0, name)) + break + + continue + + is_module = False + is_integration = False + + dirname = os.path.dirname(path) + + if path.startswith('lib/ansible/modules/'): + is_module = True + elif re.search('^test/support/[^/]+/plugins/modules/', path): + is_module = True + elif re.search('^test/support/[^/]+/collections/ansible_collections/[^/]+/[^/]+/plugins/modules/', path): + is_module = True + elif path.startswith('test/lib/ansible_test/_data/'): + pass + elif path.startswith('lib/') or path.startswith('test/lib/'): + if executable: + print('%s:%d:%d: should not be executable' % (path, 0, 0)) + + if shebang: + print('%s:%d:%d: should not have a shebang' % (path, 0, 0)) + + continue + elif path.startswith('test/integration/targets/') or path.startswith('tests/integration/targets/'): + is_integration = True + + if dirname.endswith('/library') or '/plugins/modules' in dirname or dirname in ( + # non-standard module library directories + 'test/integration/targets/module_precedence/lib_no_extension', + 'test/integration/targets/module_precedence/lib_with_extension', + ): + is_module = True + elif path.startswith('plugins/modules/'): + is_module = True + + if is_module: + if executable: + print('%s:%d:%d: module should not be executable' % (path, 0, 0)) + + ext = os.path.splitext(path)[1] + expected_shebang = module_shebangs.get(ext) + expected_ext = ' or '.join(['"%s"' % k for k in module_shebangs]) + + if expected_shebang: + if shebang == expected_shebang: + continue + + print('%s:%d:%d: expected module shebang "%s" but found: %s' % (path, 1, 1, expected_shebang, shebang)) + else: + print('%s:%d:%d: expected module extension %s but found: %s' % (path, 0, 0, expected_ext, ext)) + else: + if is_integration: + allowed = integration_shebangs + else: + allowed = standard_shebangs + + if shebang not in allowed: + print('%s:%d:%d: unexpected non-module shebang: %s' % (path, 1, 1, shebang)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/symlinks.json b/test/lib/ansible_test/_data/sanity/code-smell/symlinks.json new file mode 100644 index 00000000..6f13c86b --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/symlinks.json @@ -0,0 +1,5 @@ +{ + "include_directories": true, + "include_symlinks": true, + "output": "path-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/symlinks.py b/test/lib/ansible_test/_data/sanity/code-smell/symlinks.py new file mode 100755 index 00000000..0585c6b1 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/symlinks.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys + + +def main(): + root_dir = os.getcwd() + os.path.sep + + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + if not os.path.islink(path.rstrip(os.path.sep)): + continue + + if not os.path.exists(path): + print('%s: broken symlinks are not allowed' % path) + continue + + if path.endswith(os.path.sep): + print('%s: symlinks to directories are not allowed' % path) + continue + + real_path = os.path.realpath(path) + + if not real_path.startswith(root_dir): + print('%s: symlinks outside content tree are not allowed: %s' % (path, os.path.relpath(real_path, os.path.dirname(path)))) + continue + + +if __name__ == '__main__': + main() diff --git 
a/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.json b/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.json new file mode 100644 index 00000000..36103051 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.json @@ -0,0 +1,10 @@ +{ + "prefixes": [ + "lib/ansible/modules/", + "plugins/modules/" + ], + "extensions": [ + ".py" + ], + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.py b/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.py new file mode 100755 index 00000000..687136dc --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'(expanduser)', text) + + if match: + print('%s:%d:%d: use argspec type="path" instead of type="str" to avoid use of `expanduser`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.json b/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.json new file mode 100644 index 00000000..776590b7 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.json @@ -0,0 +1,6 @@ +{ + "extensions": [ + ".py" + ], + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.py b/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.py new file mode 100755 index 00000000..49cb76c5 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'((^\s*import\s+six\b)|(^\s*from\s+six\b))', text) + + if match: + print('%s:%d:%d: use `ansible.module_utils.six` instead of `six`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/compile/compile.py b/test/lib/ansible_test/_data/sanity/compile/compile.py new file mode 100755 index 00000000..61910eee --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/compile/compile.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python +"""Python syntax checker with lint friendly output.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import warnings + +with warnings.catch_warnings(): + # The parser module is deprecated as of Python 3.9. + # This implementation will need to be updated to use another solution. + # Until then, disable the deprecation warnings to prevent test failures. 
+ warnings.simplefilter('ignore', DeprecationWarning) + import parser + +import sys + + +def main(): + status = 0 + + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'rb') as source_fd: + if sys.version_info[0] == 3: + source = source_fd.read().decode('utf-8') + else: + source = source_fd.read() + + try: + parser.suite(source) + except SyntaxError: + ex = sys.exc_info()[1] + status = 1 + message = ex.text.splitlines()[0].strip() + sys.stdout.write("%s:%d:%d: SyntaxError: %s\n" % (path, ex.lineno, ex.offset, message)) + sys.stdout.flush() + + sys.exit(status) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/import/importer.py b/test/lib/ansible_test/_data/sanity/import/importer.py new file mode 100755 index 00000000..ef8db71b --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/import/importer.py @@ -0,0 +1,467 @@ +#!/usr/bin/env python +"""Import the given python module(s) and report error(s) encountered.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +def main(): + """ + Main program function used to isolate globals from imported code. + Changes to globals in imported modules on Python 2.x will overwrite our own globals. + """ + import ansible + import contextlib + import datetime + import json + import os + import re + import runpy + import subprocess + import sys + import traceback + import types + import warnings + + ansible_path = os.path.dirname(os.path.dirname(ansible.__file__)) + temp_path = os.environ['SANITY_TEMP_PATH'] + os.path.sep + external_python = os.environ.get('SANITY_EXTERNAL_PYTHON') or sys.executable + collection_full_name = os.environ.get('SANITY_COLLECTION_FULL_NAME') + collection_root = os.environ.get('ANSIBLE_COLLECTIONS_PATH') + + try: + # noinspection PyCompatibility + from importlib import import_module + except ImportError: + def import_module(name): + __import__(name) + return sys.modules[name] + + try: + # noinspection PyCompatibility + from StringIO import StringIO + except ImportError: + from io import StringIO + + if collection_full_name: + # allow importing code from collections when testing a collection + from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native, text_type + from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder + from ansible.utils.collection_loader import _collection_finder + + yaml_to_json_path = os.path.join(os.path.dirname(__file__), 'yaml_to_json.py') + yaml_to_dict_cache = {} + + # unique ISO date marker matching the one present in yaml_to_json.py + iso_date_marker = 'isodate:f23983df-f3df-453c-9904-bcd08af468cc:' + iso_date_re = re.compile('^%s([0-9]{4})-([0-9]{2})-([0-9]{2})$' % iso_date_marker) + + def parse_value(value): + """Custom value parser for JSON deserialization that recognizes our internal ISO date format.""" + if isinstance(value, text_type): + match = iso_date_re.search(value) + + if match: + value = datetime.date(int(match.group(1)), int(match.group(2)), int(match.group(3))) + + return value + + def object_hook(data): + """Object hook for custom ISO date deserialization from JSON.""" + return dict((key, parse_value(value)) for key, value in data.items()) + + def yaml_to_dict(yaml, content_id): + """ + Return a Python dict version of the provided YAML. + Conversion is done in a subprocess since the current Python interpreter does not have access to PyYAML. 
+ """ + if content_id in yaml_to_dict_cache: + return yaml_to_dict_cache[content_id] + + try: + cmd = [external_python, yaml_to_json_path] + proc = subprocess.Popen([to_bytes(c) for c in cmd], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout_bytes, stderr_bytes = proc.communicate(to_bytes(yaml)) + + if proc.returncode != 0: + raise Exception('command %s failed with return code %d: %s' % ([to_native(c) for c in cmd], proc.returncode, to_native(stderr_bytes))) + + data = yaml_to_dict_cache[content_id] = json.loads(to_text(stdout_bytes), object_hook=object_hook) + + return data + except Exception as ex: + raise Exception('internal importer error - failed to parse yaml: %s' % to_native(ex)) + + _collection_finder._meta_yml_to_dict = yaml_to_dict # pylint: disable=protected-access + + collection_loader = _AnsibleCollectionFinder(paths=[collection_root]) + collection_loader._install() # pylint: disable=protected-access + else: + # do not support collection loading when not testing a collection + collection_loader = None + + # remove all modules under the ansible package + list(map(sys.modules.pop, [m for m in sys.modules if m.partition('.')[0] == ansible.__name__])) + + # pre-load an empty ansible package to prevent unwanted code in __init__.py from loading + # this more accurately reflects the environment that AnsiballZ runs modules under + # it also avoids issues with imports in the ansible package that are not allowed + ansible_module = types.ModuleType(ansible.__name__) + ansible_module.__file__ = ansible.__file__ + ansible_module.__path__ = ansible.__path__ + ansible_module.__package__ = ansible.__package__ + + sys.modules[ansible.__name__] = ansible_module + + class ImporterAnsibleModuleException(Exception): + """Exception thrown during initialization of ImporterAnsibleModule.""" + + class ImporterAnsibleModule: + """Replacement for AnsibleModule to support import testing.""" + def __init__(self, *args, **kwargs): + raise ImporterAnsibleModuleException() + + class ImportBlacklist: + """Blacklist inappropriate imports.""" + def __init__(self, path, name): + self.path = path + self.name = name + self.loaded_modules = set() + + def find_module(self, fullname, path=None): + """Return self if the given fullname is blacklisted, otherwise return None. 
+ :param fullname: str + :param path: str + :return: ImportBlacklist | None + """ + if fullname in self.loaded_modules: + return None # ignore modules that are already being loaded + + if is_name_in_namepace(fullname, ['ansible']): + if fullname in ('ansible.module_utils.basic', 'ansible.module_utils.common.removed'): + return self # intercept loading so we can modify the result + + if is_name_in_namepace(fullname, ['ansible.module_utils', self.name]): + return None # module_utils and module under test are always allowed + + if any(os.path.exists(candidate_path) for candidate_path in convert_ansible_name_to_absolute_paths(fullname)): + return self # blacklist ansible files that exist + + return None # ansible file does not exist, do not blacklist + + if is_name_in_namepace(fullname, ['ansible_collections']): + if not collection_loader: + return self # blacklist collections when we are not testing a collection + + if is_name_in_namepace(fullname, ['ansible_collections...plugins.module_utils', self.name]): + return None # module_utils and module under test are always allowed + + if collection_loader.find_module(fullname, path): + return self # blacklist collection files that exist + + return None # collection file does not exist, do not blacklist + + # not a namespace we care about + return None + + def load_module(self, fullname): + """Raise an ImportError. + :type fullname: str + """ + if fullname == 'ansible.module_utils.basic': + module = self.__load_module(fullname) + + # stop Ansible module execution during AnsibleModule instantiation + module.AnsibleModule = ImporterAnsibleModule + # no-op for _load_params since it may be called before instantiating AnsibleModule + module._load_params = lambda *args, **kwargs: {} # pylint: disable=protected-access + + return module + + if fullname == 'ansible.module_utils.common.removed': + module = self.__load_module(fullname) + + # no-op for removed_module since it is called in place of AnsibleModule instantiation + module.removed_module = lambda *args, **kwargs: None + + return module + + raise ImportError('import of "%s" is not allowed in this context' % fullname) + + def __load_module(self, fullname): + """Load the requested module while avoiding infinite recursion. + :type fullname: str + :rtype: module + """ + self.loaded_modules.add(fullname) + return import_module(fullname) + + def run(): + """Main program function.""" + base_dir = os.getcwd() + messages = set() + + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + name = convert_relative_path_to_name(path) + test_python_module(path, name, base_dir, messages) + + if messages: + sys.exit(10) + + def test_python_module(path, name, base_dir, messages): + """Test the given python module by importing it. 
+ :type path: str + :type name: str + :type base_dir: str + :type messages: set[str] + """ + if name in sys.modules: + return # cannot be tested because it has already been loaded + + is_ansible_module = (path.startswith('lib/ansible/modules/') or path.startswith('plugins/modules/')) and os.path.basename(path) != '__init__.py' + run_main = is_ansible_module + + if path == 'lib/ansible/modules/async_wrapper.py': + # async_wrapper is a non-standard Ansible module (does not use AnsibleModule) so we cannot test the main function + run_main = False + + capture_normal = Capture() + capture_main = Capture() + + try: + with monitor_sys_modules(path, messages): + with blacklist_imports(path, name, messages): + with capture_output(capture_normal): + import_module(name) + + if run_main: + with monitor_sys_modules(path, messages): + with blacklist_imports(path, name, messages): + with capture_output(capture_main): + runpy.run_module(name, run_name='__main__', alter_sys=True) + except ImporterAnsibleModuleException: + # module instantiated AnsibleModule without raising an exception + pass + except BaseException as ex: # pylint: disable=locally-disabled, broad-except + # intentionally catch all exceptions, including calls to sys.exit + exc_type, _exc, exc_tb = sys.exc_info() + message = str(ex) + results = list(reversed(traceback.extract_tb(exc_tb))) + line = 0 + offset = 0 + full_path = os.path.join(base_dir, path) + base_path = base_dir + os.path.sep + source = None + + # avoid line wraps in messages + message = re.sub(r'\n *', ': ', message) + + for result in results: + if result[0] == full_path: + # save the line number for the file under test + line = result[1] or 0 + + if not source and result[0].startswith(base_path) and not result[0].startswith(temp_path): + # save the first path and line number in the traceback which is in our source tree + source = (os.path.relpath(result[0], base_path), result[1] or 0, 0) + + if isinstance(ex, SyntaxError): + # SyntaxError has better information than the traceback + if ex.filename == full_path: # pylint: disable=locally-disabled, no-member + # syntax error was reported in the file under test + line = ex.lineno or 0 # pylint: disable=locally-disabled, no-member + offset = ex.offset or 0 # pylint: disable=locally-disabled, no-member + elif ex.filename.startswith(base_path) and not ex.filename.startswith(temp_path): # pylint: disable=locally-disabled, no-member + # syntax error was reported in our source tree + source = (os.path.relpath(ex.filename, base_path), ex.lineno or 0, ex.offset or 0) # pylint: disable=locally-disabled, no-member + + # remove the filename and line number from the message + # either it was extracted above, or it's not really useful information + message = re.sub(r' \(.*?, line [0-9]+\)$', '', message) + + if source and source[0] != path: + message += ' (at %s:%d:%d)' % (source[0], source[1], source[2]) + + report_message(path, line, offset, 'traceback', '%s: %s' % (exc_type.__name__, message), messages) + finally: + capture_report(path, capture_normal, messages) + capture_report(path, capture_main, messages) + + def is_name_in_namepace(name, namespaces): + """Returns True if the given name is one of the given namespaces, otherwise returns False.""" + name_parts = name.split('.') + + for namespace in namespaces: + namespace_parts = namespace.split('.') + length = min(len(name_parts), len(namespace_parts)) + + truncated_name = name_parts[0:length] + truncated_namespace = namespace_parts[0:length] + + # empty parts in the namespace are 
treated as wildcards + # to simplify the comparison, use those empty parts to indicate the positions in the name to be empty as well + for idx, part in enumerate(truncated_namespace): + if not part: + truncated_name[idx] = part + + # example: name=ansible, allowed_name=ansible.module_utils + # example: name=ansible.module_utils.system.ping, allowed_name=ansible.module_utils + if truncated_name == truncated_namespace: + return True + + return False + + def check_sys_modules(path, before, messages): + """Check for unwanted changes to sys.modules. + :type path: str + :type before: dict[str, module] + :type messages: set[str] + """ + after = sys.modules + removed = set(before.keys()) - set(after.keys()) + changed = set(key for key, value in before.items() if key in after and value != after[key]) + + # additions are checked by our custom PEP 302 loader, so we don't need to check them again here + + for module in sorted(removed): + report_message(path, 0, 0, 'unload', 'unloading of "%s" in sys.modules is not supported' % module, messages) + + for module in sorted(changed): + report_message(path, 0, 0, 'reload', 'reloading of "%s" in sys.modules is not supported' % module, messages) + + def convert_ansible_name_to_absolute_paths(name): + """Calculate the module path from the given name. + :type name: str + :rtype: list[str] + """ + return [ + os.path.join(ansible_path, name.replace('.', os.path.sep)), + os.path.join(ansible_path, name.replace('.', os.path.sep)) + '.py', + ] + + def convert_relative_path_to_name(path): + """Calculate the module name from the given path. + :type path: str + :rtype: str + """ + if path.endswith('/__init__.py'): + clean_path = os.path.dirname(path) + else: + clean_path = path + + clean_path = os.path.splitext(clean_path)[0] + + name = clean_path.replace(os.path.sep, '.') + + if collection_loader: + # when testing collections the relative paths (and names) being tested are within the collection under test + name = 'ansible_collections.%s.%s' % (collection_full_name, name) + else: + # when testing ansible all files being imported reside under the lib directory + name = name[len('lib/'):] + + return name + + class Capture: + """Captured output and/or exception.""" + def __init__(self): + self.stdout = StringIO() + self.stderr = StringIO() + + def capture_report(path, capture, messages): + """Report on captured output. + :type path: str + :type capture: Capture + :type messages: set[str] + """ + if capture.stdout.getvalue(): + first = capture.stdout.getvalue().strip().splitlines()[0].strip() + report_message(path, 0, 0, 'stdout', first, messages) + + if capture.stderr.getvalue(): + first = capture.stderr.getvalue().strip().splitlines()[0].strip() + report_message(path, 0, 0, 'stderr', first, messages) + + def report_message(path, line, column, code, message, messages): + """Report message if not already reported. + :type path: str + :type line: int + :type column: int + :type code: str + :type message: str + :type messages: set[str] + """ + message = '%s:%d:%d: %s: %s' % (path, line, column, code, message) + + if message not in messages: + messages.add(message) + print(message) + + @contextlib.contextmanager + def blacklist_imports(path, name, messages): + """Blacklist imports. 
+ :type path: str + :type name: str + :type messages: set[str] + """ + blacklist = ImportBlacklist(path, name) + + sys.meta_path.insert(0, blacklist) + sys.path_importer_cache.clear() + + try: + yield + finally: + if sys.meta_path[0] != blacklist: + report_message(path, 0, 0, 'metapath', 'changes to sys.meta_path[0] are not permitted', messages) + + while blacklist in sys.meta_path: + sys.meta_path.remove(blacklist) + + sys.path_importer_cache.clear() + + @contextlib.contextmanager + def monitor_sys_modules(path, messages): + """Monitor sys.modules for unwanted changes, reverting any additions made to our own namespaces.""" + snapshot = sys.modules.copy() + + try: + yield + finally: + check_sys_modules(path, snapshot, messages) + + for key in set(sys.modules.keys()) - set(snapshot.keys()): + if is_name_in_namepace(key, ('ansible', 'ansible_collections')): + del sys.modules[key] # only unload our own code since we know it's native Python + + @contextlib.contextmanager + def capture_output(capture): + """Capture sys.stdout and sys.stderr. + :type capture: Capture + """ + old_stdout = sys.stdout + old_stderr = sys.stderr + + sys.stdout = capture.stdout + sys.stderr = capture.stderr + + # clear all warnings registries to make all warnings available + for module in sys.modules.values(): + try: + module.__warningregistry__.clear() + except AttributeError: + pass + + with warnings.catch_warnings(): + warnings.simplefilter('error') + + try: + yield + finally: + sys.stdout = old_stdout + sys.stderr = old_stderr + + run() + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_data/sanity/import/yaml_to_json.py b/test/lib/ansible_test/_data/sanity/import/yaml_to_json.py new file mode 100644 index 00000000..09be9576 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/import/yaml_to_json.py @@ -0,0 +1,27 @@ +"""Read YAML from stdin and write JSON to stdout.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import datetime +import json +import sys + +from yaml import load + +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader + +# unique ISO date marker matching the one present in importer.py +ISO_DATE_MARKER = 'isodate:f23983df-f3df-453c-9904-bcd08af468cc:' + + +def default(value): + if isinstance(value, datetime.date): + return '%s%s' % (ISO_DATE_MARKER, value.isoformat()) + + raise TypeError('cannot serialize type: %s' % type(value)) + + +json.dump(load(sys.stdin, Loader=SafeLoader), sys.stdout, default=default) diff --git a/test/lib/ansible_test/_data/sanity/integration-aliases/yaml_to_json.py b/test/lib/ansible_test/_data/sanity/integration-aliases/yaml_to_json.py new file mode 100644 index 00000000..74a45f00 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/integration-aliases/yaml_to_json.py @@ -0,0 +1,15 @@ +"""Read YAML from stdin and write JSON to stdout.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import json +import sys + +from yaml import load + +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader + +json.dump(load(sys.stdin, Loader=SafeLoader), sys.stdout) diff --git a/test/lib/ansible_test/_data/sanity/pep8/current-ignore.txt b/test/lib/ansible_test/_data/sanity/pep8/current-ignore.txt new file mode 100644 index 00000000..659c7f59 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/pep8/current-ignore.txt @@ -0,0 +1,4 @@ +E402 +W503 +W504 +E741 diff --git 
a/test/lib/ansible_test/_data/sanity/pslint/pslint.ps1 b/test/lib/ansible_test/_data/sanity/pslint/pslint.ps1 new file mode 100755 index 00000000..1ef2743a --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/pslint/pslint.ps1 @@ -0,0 +1,43 @@ +#!/usr/bin/env pwsh +#Requires -Version 6 +#Requires -Modules PSScriptAnalyzer, PSSA-PSCustomUseLiteralPath + +Set-StrictMode -Version 2.0 +$ErrorActionPreference = "Stop" +$WarningPreference = "Stop" + +# Until https://github.com/PowerShell/PSScriptAnalyzer/issues/1217 is fixed we need to import Pester if it's +# available. +if (Get-Module -Name Pester -ListAvailable -ErrorAction SilentlyContinue) { + Import-Module -Name Pester +} + +$LiteralPathRule = Import-Module -Name PSSA-PSCustomUseLiteralPath -PassThru +$LiteralPathRulePath = Join-Path -Path $LiteralPathRule.ModuleBase -ChildPath $LiteralPathRule.RootModule + +$PSSAParams = @{ + CustomRulePath = @($LiteralPathRulePath) + IncludeDefaultRules = $true + Setting = (Join-Path -Path $PSScriptRoot -ChildPath "settings.psd1") +} + +$Results = @() + +ForEach ($Path in $Args) { + $Retries = 3 + + Do { + Try { + $Results += Invoke-ScriptAnalyzer -Path $Path @PSSAParams 3> $null + $Retries = 0 + } + Catch { + If (--$Retries -le 0) { + Throw + } + } + } + Until ($Retries -le 0) +} + +ConvertTo-Json -InputObject $Results diff --git a/test/lib/ansible_test/_data/sanity/pslint/settings.psd1 b/test/lib/ansible_test/_data/sanity/pslint/settings.psd1 new file mode 100644 index 00000000..7646ec35 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/pslint/settings.psd1 @@ -0,0 +1,13 @@ +@{ + ExcludeRules=@( + 'PSUseOutputTypeCorrectly', + 'PSUseShouldProcessForStateChangingFunctions', + # We send strings as plaintext so will always come across the 3 issues + 'PSAvoidUsingPlainTextForPassword', + 'PSAvoidUsingConvertToSecureStringWithPlainText', + 'PSAvoidUsingUserNameAndPassWordParams', + # We send the module as a base64 encoded string and a BOM will cause + # issues here + 'PSUseBOMForUnicodeEncodedFile' + ) +} diff --git a/test/lib/ansible_test/_data/sanity/pylint/config/ansible-test.cfg b/test/lib/ansible_test/_data/sanity/pylint/config/ansible-test.cfg new file mode 100644 index 00000000..d3643162 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/pylint/config/ansible-test.cfg @@ -0,0 +1,39 @@ +[MESSAGES CONTROL] + +disable= + cyclic-import, # consistent results require running with --jobs 1 and testing all files + duplicate-code, # consistent results require running with --jobs 1 and testing all files + too-few-public-methods, + too-many-arguments, + too-many-branches, + too-many-instance-attributes, + too-many-lines, + too-many-locals, + too-many-nested-blocks, + too-many-return-statements, + too-many-statements, + no-self-use, + unused-import, # pylint does not understand PEP 484 type hints + consider-using-dict-comprehension, # requires Python 2.6, which we still support + consider-using-set-comprehension, # requires Python 2.6, which we still support + +[BASIC] + +bad-names=foo, + bar, + baz, + toto, + tutu, + tata, + _, + +good-names=i, + j, + k, + ex, + Run, + C, + __metaclass__, + +method-rgx=[a-z_][a-z0-9_]{2,40}$ +function-rgx=[a-z_][a-z0-9_]{2,40}$ diff --git a/test/lib/ansible_test/_data/sanity/pylint/config/collection.cfg b/test/lib/ansible_test/_data/sanity/pylint/config/collection.cfg new file mode 100644 index 00000000..c2d75b1c --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/pylint/config/collection.cfg @@ -0,0 +1,135 @@ +[MESSAGES CONTROL] + +disable= + abstract-method, + 
access-member-before-definition, + arguments-differ, + assignment-from-no-return, + assignment-from-none, + attribute-defined-outside-init, + bad-continuation, + bad-indentation, + bad-mcs-classmethod-argument, + broad-except, + c-extension-no-member, + cell-var-from-loop, + chained-comparison, + comparison-with-callable, + consider-iterating-dictionary, + consider-merging-isinstance, + consider-using-dict-comprehension, + consider-using-enumerate, + consider-using-get, + consider-using-in, + consider-using-set-comprehension, + consider-using-ternary, + cyclic-import, # consistent results require running with --jobs 1 and testing all files + deprecated-lambda, + deprecated-method, + deprecated-module, + duplicate-code, # consistent results require running with --jobs 1 and testing all files + eval-used, + exec-used, + expression-not-assigned, + fixme, + function-redefined, + global-statement, + global-variable-undefined, + import-error, + import-self, + inconsistent-return-statements, + invalid-envvar-default, + invalid-name, + invalid-sequence-index, + keyword-arg-before-vararg, + len-as-condition, + line-too-long, + literal-comparison, + locally-disabled, + method-hidden, + misplaced-comparison-constant, + missing-docstring, + no-else-raise, + no-else-return, + no-init, + no-member, + no-name-in-module, + no-self-use, + no-value-for-parameter, + non-iterator-returned, + not-a-mapping, + not-an-iterable, + not-callable, + old-style-class, + pointless-statement, + pointless-string-statement, + possibly-unused-variable, + protected-access, + redefined-argument-from-local, + redefined-builtin, + redefined-outer-name, + redefined-variable-type, + reimported, + relative-beyond-top-level, # https://github.com/PyCQA/pylint/issues/2967 + signature-differs, + simplifiable-if-expression, + simplifiable-if-statement, + subprocess-popen-preexec-fn, + super-init-not-called, + superfluous-parens, + too-few-public-methods, + too-many-ancestors, + too-many-arguments, + too-many-boolean-expressions, + too-many-branches, + too-many-function-args, + too-many-instance-attributes, + too-many-lines, + too-many-locals, + too-many-nested-blocks, + too-many-public-methods, + too-many-return-statements, + too-many-statements, + trailing-comma-tuple, + trailing-comma-tuple, + try-except-raise, + unbalanced-tuple-unpacking, + undefined-loop-variable, + unexpected-keyword-arg, + ungrouped-imports, + unidiomatic-typecheck, + unnecessary-pass, + unsubscriptable-object, + unsupported-assignment-operation, + unsupported-delete-operation, + unsupported-membership-test, + unused-argument, + unused-import, + unused-variable, + used-before-assignment, + useless-object-inheritance, + useless-return, + useless-super-delegation, + wrong-import-order, + wrong-import-position, + +[BASIC] + +bad-names=foo, + bar, + baz, + toto, + tutu, + tata, + _, + +good-names=i, + j, + k, + ex, + Run, + +[TYPECHECK] + +ignored-modules= + _MovedItems, diff --git a/test/lib/ansible_test/_data/sanity/pylint/config/default.cfg b/test/lib/ansible_test/_data/sanity/pylint/config/default.cfg new file mode 100644 index 00000000..45199078 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/pylint/config/default.cfg @@ -0,0 +1,135 @@ +[MESSAGES CONTROL] + +disable= + abstract-method, + access-member-before-definition, + arguments-differ, + assignment-from-no-return, + assignment-from-none, + attribute-defined-outside-init, + bad-continuation, + bad-indentation, + bad-mcs-classmethod-argument, + broad-except, + c-extension-no-member, + 
cell-var-from-loop, + chained-comparison, + comparison-with-callable, + consider-iterating-dictionary, + consider-merging-isinstance, + consider-using-dict-comprehension, + consider-using-enumerate, + consider-using-get, + consider-using-in, + consider-using-set-comprehension, + consider-using-ternary, + cyclic-import, # consistent results require running with --jobs 1 and testing all files + deprecated-lambda, + deprecated-method, + deprecated-module, + duplicate-code, # consistent results require running with --jobs 1 and testing all files + eval-used, + exec-used, + expression-not-assigned, + fixme, + function-redefined, + global-statement, + global-variable-undefined, + import-error, + import-self, + inconsistent-return-statements, + invalid-envvar-default, + invalid-name, + invalid-sequence-index, + keyword-arg-before-vararg, + len-as-condition, + line-too-long, + literal-comparison, + locally-disabled, + method-hidden, + misplaced-comparison-constant, + missing-docstring, + no-else-raise, + no-else-return, + no-init, + no-member, + no-name-in-module, + no-self-use, + no-value-for-parameter, + non-iterator-returned, + not-a-mapping, + not-an-iterable, + not-callable, + old-style-class, + pointless-statement, + pointless-string-statement, + possibly-unused-variable, + protected-access, + redefined-argument-from-local, + redefined-builtin, + redefined-outer-name, + redefined-variable-type, + reimported, + relative-import, + signature-differs, + simplifiable-if-expression, + simplifiable-if-statement, + subprocess-popen-preexec-fn, + super-init-not-called, + superfluous-parens, + too-few-public-methods, + too-many-ancestors, + too-many-arguments, + too-many-boolean-expressions, + too-many-branches, + too-many-function-args, + too-many-instance-attributes, + too-many-lines, + too-many-locals, + too-many-nested-blocks, + too-many-public-methods, + too-many-return-statements, + too-many-statements, + trailing-comma-tuple, + trailing-comma-tuple, + try-except-raise, + unbalanced-tuple-unpacking, + undefined-loop-variable, + unexpected-keyword-arg, + ungrouped-imports, + unidiomatic-typecheck, + unnecessary-pass, + unsubscriptable-object, + unsupported-assignment-operation, + unsupported-delete-operation, + unsupported-membership-test, + unused-argument, + unused-import, + unused-variable, + used-before-assignment, + useless-object-inheritance, + useless-return, + useless-super-delegation, + wrong-import-order, + wrong-import-position, + +[BASIC] + +bad-names=foo, + bar, + baz, + toto, + tutu, + tata, + _, + +good-names=i, + j, + k, + ex, + Run, + +[TYPECHECK] + +ignored-modules= + _MovedItems, diff --git a/test/lib/ansible_test/_data/sanity/pylint/config/sanity.cfg b/test/lib/ansible_test/_data/sanity/pylint/config/sanity.cfg new file mode 100644 index 00000000..f601ab57 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/pylint/config/sanity.cfg @@ -0,0 +1,42 @@ +[MESSAGES CONTROL] + +disable= + cyclic-import, # consistent results require running with --jobs 1 and testing all files + duplicate-code, # consistent results require running with --jobs 1 and testing all files + too-few-public-methods, + too-many-arguments, + too-many-branches, + too-many-instance-attributes, + too-many-lines, + too-many-locals, + too-many-nested-blocks, + too-many-return-statements, + too-many-statements, + missing-docstring, + unused-import, # pylint does not understand PEP 484 type hints + consider-using-dict-comprehension, # requires Python 2.6, which we still support + consider-using-set-comprehension, # 
requires Python 2.6, which we still support + +[BASIC] + +bad-names=foo, + bar, + baz, + toto, + tutu, + tata, + _, + +good-names=i, + j, + k, + f, + e, + ex, + Run, + C, + __metaclass__, + +module-rgx=[a-z_][a-z0-9_-]{2,40}$ +method-rgx=[a-z_][a-z0-9_]{2,40}$ +function-rgx=[a-z_][a-z0-9_]{2,40}$ diff --git a/test/lib/ansible_test/_data/sanity/pylint/plugins/deprecated.py b/test/lib/ansible_test/_data/sanity/pylint/plugins/deprecated.py new file mode 100644 index 00000000..c06059c4 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/pylint/plugins/deprecated.py @@ -0,0 +1,250 @@ +# (c) 2018, Matt Martz <matt@sivel.net> +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) +# -*- coding: utf-8 -*- +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import datetime +import re + +from distutils.version import LooseVersion + +import astroid + +from pylint.interfaces import IAstroidChecker +from pylint.checkers import BaseChecker +from pylint.checkers.utils import check_messages + +from ansible.module_utils.six import string_types +from ansible.release import __version__ as ansible_version_raw +from ansible.utils.version import SemanticVersion + +MSGS = { + 'E9501': ("Deprecated version (%r) found in call to Display.deprecated " + "or AnsibleModule.deprecate", + "ansible-deprecated-version", + "Used when a call to Display.deprecated specifies a version " + "less than or equal to the current version of Ansible", + {'minversion': (2, 6)}), + 'E9502': ("Display.deprecated call without a version or date", + "ansible-deprecated-no-version", + "Used when a call to Display.deprecated does not specify a " + "version or date", + {'minversion': (2, 6)}), + 'E9503': ("Invalid deprecated version (%r) found in call to " + "Display.deprecated or AnsibleModule.deprecate", + "ansible-invalid-deprecated-version", + "Used when a call to Display.deprecated specifies an invalid " + "Ansible version number", + {'minversion': (2, 6)}), + 'E9504': ("Deprecated version (%r) found in call to Display.deprecated " + "or AnsibleModule.deprecate", + "collection-deprecated-version", + "Used when a call to Display.deprecated specifies a collection " + "version less than or equal to the current version of this " + "collection", + {'minversion': (2, 6)}), + 'E9505': ("Invalid deprecated version (%r) found in call to " + "Display.deprecated or AnsibleModule.deprecate", + "collection-invalid-deprecated-version", + "Used when a call to Display.deprecated specifies an invalid " + "collection version number", + {'minversion': (2, 6)}), + 'E9506': ("No collection name found in call to Display.deprecated or " + "AnsibleModule.deprecate", + "ansible-deprecated-no-collection-name", + "The current collection name in format `namespace.name` must " + "be provided as collection_name when calling Display.deprecated " + "or AnsibleModule.deprecate (`ansible.builtin` for ansible-base)", + {'minversion': (2, 6)}), + 'E9507': ("Wrong collection name (%r) found in call to " + "Display.deprecated or AnsibleModule.deprecate", + "wrong-collection-deprecated", + "The name of the current collection must be passed to the " + "Display.deprecated resp. 
AnsibleModule.deprecate calls "
+ "(`ansible.builtin` for ansible-base)",
+ {'minversion': (2, 6)}),
+ 'E9508': ("Expired date (%r) found in call to Display.deprecated "
+ "or AnsibleModule.deprecate",
+ "ansible-deprecated-date",
+ "Used when a call to Display.deprecated specifies a date "
+ "before today",
+ {'minversion': (2, 6)}),
+ 'E9509': ("Invalid deprecated date (%r) found in call to "
+ "Display.deprecated or AnsibleModule.deprecate",
+ "ansible-invalid-deprecated-date",
+ "Used when a call to Display.deprecated specifies an invalid "
+ "date. It must be a string in format `YYYY-MM-DD` (ISO 8601)",
+ {'minversion': (2, 6)}),
+ 'E9510': ("Both version and date found in call to "
+ "Display.deprecated or AnsibleModule.deprecate",
+ "ansible-deprecated-both-version-and-date",
+ "Only one of version and date must be specified",
+ {'minversion': (2, 6)}),
+}
+
+
+ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version_raw.split('.')[:3]))
+
+
+def _get_expr_name(node):
+ """Function to get either ``attrname`` or ``name`` from ``node.func.expr``
+
+ Created specifically for the case of ``display.deprecated`` or ``self._display.deprecated``
+ """
+ try:
+ return node.func.expr.attrname
+ except AttributeError:
+ # If this fails too, we'll let it raise, the caller should catch it
+ return node.func.expr.name
+
+
+def parse_isodate(value):
+ msg = 'Expected ISO 8601 date string (YYYY-MM-DD)'
+ if not isinstance(value, string_types):
+ raise ValueError(msg)
+ # From Python 3.7 on, there is datetime.date.fromisoformat(). For older versions,
+ # we have to do things manually.
+ if not re.match('^[0-9]{4}-[0-9]{2}-[0-9]{2}$', value):
+ raise ValueError(msg)
+ try:
+ return datetime.datetime.strptime(value, '%Y-%m-%d').date()
+ except ValueError:
+ raise ValueError(msg)
+
+
+class AnsibleDeprecatedChecker(BaseChecker):
+ """Checks for Display.deprecated calls to ensure that the ``version``
+ has not passed or met the time for removal
+ """
+
+ __implements__ = (IAstroidChecker,)
+ name = 'deprecated'
+ msgs = MSGS
+
+ options = (
+ ('collection-name', {
+ 'default': None,
+ 'type': 'string',
+ 'metavar': '<name>',
+ 'help': 'The collection\'s name used to check collection names in deprecations.',
+ }),
+ ('collection-version', {
+ 'default': None,
+ 'type': 'string',
+ 'metavar': '<version>',
+ 'help': 'The collection\'s version number used to check deprecations.',
+ }),
+ )
+
+ def __init__(self, *args, **kwargs):
+ self.collection_version = None
+ self.collection_name = None
+ super(AnsibleDeprecatedChecker, self).__init__(*args, **kwargs)
+
+ def set_option(self, optname, value, action=None, optdict=None):
+ super(AnsibleDeprecatedChecker, self).set_option(optname, value, action, optdict)
+ if optname == 'collection-version' and value is not None:
+ self.collection_version = SemanticVersion(self.config.collection_version)
+ if optname == 'collection-name' and value is not None:
+ self.collection_name = self.config.collection_name
+
+ def _check_date(self, node, date):
+ if not isinstance(date, str):
+ self.add_message('ansible-invalid-deprecated-date', node=node, args=(date,))
+ return
+
+ try:
+ date_parsed = parse_isodate(date)
+ except ValueError:
+ self.add_message('ansible-invalid-deprecated-date', node=node, args=(date,))
+ return
+
+ if date_parsed < datetime.date.today():
+ self.add_message('ansible-deprecated-date', node=node, args=(date,))
+
+ def _check_version(self, node, version, collection_name):
+ if not isinstance(version, (str, float)):
+ self.add_message('ansible-invalid-deprecated-version', node=node, 
args=(version,)) + return + + version_no = str(version) + + if collection_name == 'ansible.builtin': + # Ansible-base + try: + if not version_no: + raise ValueError('Version string should not be empty') + loose_version = LooseVersion(str(version_no)) + if ANSIBLE_VERSION >= loose_version: + self.add_message('ansible-deprecated-version', node=node, args=(version,)) + except ValueError: + self.add_message('ansible-invalid-deprecated-version', node=node, args=(version,)) + elif collection_name: + # Collections + try: + if not version_no: + raise ValueError('Version string should not be empty') + semantic_version = SemanticVersion(version_no) + if collection_name == self.collection_name and self.collection_version is not None: + if self.collection_version >= semantic_version: + self.add_message('collection-deprecated-version', node=node, args=(version,)) + except ValueError: + self.add_message('collection-invalid-deprecated-version', node=node, args=(version,)) + + @check_messages(*(MSGS.keys())) + def visit_call(self, node): + version = None + date = None + collection_name = None + try: + if (node.func.attrname == 'deprecated' and 'display' in _get_expr_name(node) or + node.func.attrname == 'deprecate' and _get_expr_name(node)): + if node.keywords: + for keyword in node.keywords: + if len(node.keywords) == 1 and keyword.arg is None: + # This is likely a **kwargs splat + return + if keyword.arg == 'version': + if isinstance(keyword.value.value, astroid.Name): + # This is likely a variable + return + version = keyword.value.value + if keyword.arg == 'date': + if isinstance(keyword.value.value, astroid.Name): + # This is likely a variable + return + date = keyword.value.value + if keyword.arg == 'collection_name': + if isinstance(keyword.value.value, astroid.Name): + # This is likely a variable + return + collection_name = keyword.value.value + if not version and not date: + try: + version = node.args[1].value + except IndexError: + self.add_message('ansible-deprecated-no-version', node=node) + return + if version and date: + self.add_message('ansible-deprecated-both-version-and-date', node=node) + + if collection_name: + this_collection = collection_name == (self.collection_name or 'ansible.builtin') + if not this_collection: + self.add_message('wrong-collection-deprecated', node=node, args=(collection_name,)) + elif self.collection_name is not None: + self.add_message('ansible-deprecated-no-collection-name', node=node) + + if date: + self._check_date(node, date) + elif version: + self._check_version(node, version, collection_name) + except AttributeError: + # Not the type of node we are interested in + pass + + +def register(linter): + """required method to auto register this checker """ + linter.register_checker(AnsibleDeprecatedChecker(linter)) diff --git a/test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py b/test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py new file mode 100644 index 00000000..eafde73b --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py @@ -0,0 +1,90 @@ +# (c) 2018, Matt Martz <matt@sivel.net> +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) +# -*- coding: utf-8 -*- +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import sys + +import six + +import astroid +from pylint.interfaces import IAstroidChecker +from pylint.checkers import BaseChecker +from pylint.checkers import utils +from pylint.checkers.utils import 
check_messages +try: + from pylint.checkers.utils import parse_format_method_string +except ImportError: + # noinspection PyUnresolvedReferences + from pylint.checkers.strings import parse_format_method_string + +_PY3K = sys.version_info[:2] >= (3, 0) + +MSGS = { + 'E9305': ("Format string contains automatic field numbering " + "specification", + "ansible-format-automatic-specification", + "Used when a PEP 3101 format string contains automatic " + "field numbering (e.g. '{}').", + {'minversion': (2, 6)}), + 'E9390': ("bytes object has no .format attribute", + "ansible-no-format-on-bytestring", + "Used when a bytestring was used as a PEP 3101 format string " + "as Python3 bytestrings do not have a .format attribute", + {'minversion': (3, 0)}), +} + + +class AnsibleStringFormatChecker(BaseChecker): + """Checks string formatting operations to ensure that the format string + is valid and the arguments match the format string. + """ + + __implements__ = (IAstroidChecker,) + name = 'string' + msgs = MSGS + + @check_messages(*(MSGS.keys())) + def visit_call(self, node): + func = utils.safe_infer(node.func) + if (isinstance(func, astroid.BoundMethod) + and isinstance(func.bound, astroid.Instance) + and func.bound.name in ('str', 'unicode', 'bytes')): + if func.name == 'format': + self._check_new_format(node, func) + + def _check_new_format(self, node, func): + """ Check the new string formatting """ + if (isinstance(node.func, astroid.Attribute) + and not isinstance(node.func.expr, astroid.Const)): + return + try: + strnode = next(func.bound.infer()) + except astroid.InferenceError: + return + if not isinstance(strnode, astroid.Const): + return + + if _PY3K and isinstance(strnode.value, six.binary_type): + self.add_message('ansible-no-format-on-bytestring', node=node) + return + if not isinstance(strnode.value, six.string_types): + return + + if node.starargs or node.kwargs: + return + try: + num_args = parse_format_method_string(strnode.value)[1] + except utils.IncompleteFormatString: + return + + if num_args: + self.add_message('ansible-format-automatic-specification', + node=node) + return + + +def register(linter): + """required method to auto register this checker """ + linter.register_checker(AnsibleStringFormatChecker(linter)) diff --git a/test/lib/ansible_test/_data/sanity/pylint/plugins/unwanted.py b/test/lib/ansible_test/_data/sanity/pylint/plugins/unwanted.py new file mode 100644 index 00000000..7012feaa --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/pylint/plugins/unwanted.py @@ -0,0 +1,242 @@ +"""A plugin for pylint to identify imports and functions which should not be used.""" +from __future__ import (absolute_import, division, print_function) + +__metaclass__ = type + +import os + +import astroid + +from pylint.checkers import BaseChecker +from pylint.interfaces import IAstroidChecker + +ANSIBLE_TEST_MODULES_PATH = os.environ['ANSIBLE_TEST_MODULES_PATH'] +ANSIBLE_TEST_MODULE_UTILS_PATH = os.environ['ANSIBLE_TEST_MODULE_UTILS_PATH'] + + +class UnwantedEntry: + """Defines an unwanted import.""" + def __init__(self, alternative, modules_only=False, names=None, ignore_paths=None): + """ + :type alternative: str + :type modules_only: bool + :type names: tuple[str] | None + :type ignore_paths: tuple[str] | None + """ + self.alternative = alternative + self.modules_only = modules_only + self.names = set(names) if names else set() + self.ignore_paths = ignore_paths + + def applies_to(self, path, name=None): + """ + :type path: str + :type name: str | None + :rtype: bool + """ + if 
self.names: + if not name: + return False + + if name not in self.names: + return False + + if self.ignore_paths and any(path.endswith(ignore_path) for ignore_path in self.ignore_paths): + return False + + if self.modules_only: + return is_module_path(path) + + return True + + +def is_module_path(path): + """ + :type path: str + :rtype: bool + """ + return path.startswith(ANSIBLE_TEST_MODULES_PATH) or path.startswith(ANSIBLE_TEST_MODULE_UTILS_PATH) + + +class AnsibleUnwantedChecker(BaseChecker): + """Checker for unwanted imports and functions.""" + __implements__ = (IAstroidChecker,) + + name = 'unwanted' + + BAD_IMPORT = 'ansible-bad-import' + BAD_IMPORT_FROM = 'ansible-bad-import-from' + BAD_FUNCTION = 'ansible-bad-function' + BAD_MODULE_IMPORT = 'ansible-bad-module-import' + + msgs = dict( + E5101=('Import %s instead of %s', + BAD_IMPORT, + 'Identifies imports which should not be used.'), + E5102=('Import %s from %s instead of %s', + BAD_IMPORT_FROM, + 'Identifies imports which should not be used.'), + E5103=('Call %s instead of %s', + BAD_FUNCTION, + 'Identifies functions which should not be used.'), + E5104=('Import external package or ansible.module_utils not %s', + BAD_MODULE_IMPORT, + 'Identifies imports which should not be used.'), + ) + + unwanted_imports = dict( + # Additional imports that we may want to start checking: + # boto=UnwantedEntry('boto3', modules_only=True), + # requests=UnwantedEntry('ansible.module_utils.urls', modules_only=True), + # urllib=UnwantedEntry('ansible.module_utils.urls', modules_only=True), + + # see https://docs.python.org/2/library/urllib2.html + urllib2=UnwantedEntry('ansible.module_utils.urls', + ignore_paths=( + '/lib/ansible/module_utils/urls.py', + )), + + # see https://docs.python.org/3.7/library/collections.abc.html + collections=UnwantedEntry('ansible.module_utils.common._collections_compat', + ignore_paths=( + '/lib/ansible/module_utils/common/_collections_compat.py', + ), + names=( + 'MappingView', + 'ItemsView', + 'KeysView', + 'ValuesView', + 'Mapping', 'MutableMapping', + 'Sequence', 'MutableSequence', + 'Set', 'MutableSet', + 'Container', + 'Hashable', + 'Sized', + 'Callable', + 'Iterable', + 'Iterator', + )), + ) + + unwanted_functions = { + # see https://docs.python.org/2/library/tempfile.html#tempfile.mktemp + 'tempfile.mktemp': UnwantedEntry('tempfile.mkstemp'), + + 'sys.exit': UnwantedEntry('exit_json or fail_json', + ignore_paths=( + '/lib/ansible/module_utils/basic.py', + '/lib/ansible/modules/async_wrapper.py', + '/lib/ansible/module_utils/common/removed.py', + ), + modules_only=True), + + 'builtins.print': UnwantedEntry('module.log or module.debug', + ignore_paths=( + '/lib/ansible/module_utils/basic.py', + '/lib/ansible/module_utils/common/removed.py', + ), + modules_only=True), + } + + def visit_import(self, node): + """ + :type node: astroid.node_classes.Import + """ + for name in node.names: + self._check_import(node, name[0]) + + def visit_importfrom(self, node): + """ + :type node: astroid.node_classes.ImportFrom + """ + self._check_importfrom(node, node.modname, node.names) + + def visit_attribute(self, node): + """ + :type node: astroid.node_classes.Attribute + """ + last_child = node.last_child() + + # this is faster than using type inference and will catch the most common cases + if not isinstance(last_child, astroid.node_classes.Name): + return + + module = last_child.name + + entry = self.unwanted_imports.get(module) + + if entry and entry.names: + if entry.applies_to(self.linter.current_file, node.attrname): + 
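# attribute access on an unwanted module (e.g. collections.Mapping) is reported with the suggested alternative import +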
self.add_message(self.BAD_IMPORT_FROM, args=(node.attrname, entry.alternative, module), node=node)
+
+ def visit_call(self, node):
+ """
+ :type node: astroid.node_classes.Call
+ """
+ try:
+ for i in node.func.inferred():
+ func = None
+
+ if isinstance(i, astroid.scoped_nodes.FunctionDef) and isinstance(i.parent, astroid.scoped_nodes.Module):
+ func = '%s.%s' % (i.parent.name, i.name)
+
+ if not func:
+ continue
+
+ entry = self.unwanted_functions.get(func)
+
+ if entry and entry.applies_to(self.linter.current_file):
+ self.add_message(self.BAD_FUNCTION, args=(entry.alternative, func), node=node)
+ except astroid.exceptions.InferenceError:
+ pass
+
+ def _check_import(self, node, modname):
+ """
+ :type node: astroid.node_classes.Import
+ :type modname: str
+ """
+ self._check_module_import(node, modname)
+
+ entry = self.unwanted_imports.get(modname)
+
+ if not entry:
+ return
+
+ if entry.applies_to(self.linter.current_file):
+ self.add_message(self.BAD_IMPORT, args=(entry.alternative, modname), node=node)
+
+ def _check_importfrom(self, node, modname, names):
+ """
+ :type node: astroid.node_classes.ImportFrom
+ :type modname: str
+ :type names: list[str]
+ """
+ self._check_module_import(node, modname)
+
+ entry = self.unwanted_imports.get(modname)
+
+ if not entry:
+ return
+
+ for name in names:
+ if entry.applies_to(self.linter.current_file, name[0]):
+ self.add_message(self.BAD_IMPORT_FROM, args=(name[0], entry.alternative, modname), node=node)
+
+ def _check_module_import(self, node, modname):
+ """
+ :type node: astroid.node_classes.Import | astroid.node_classes.ImportFrom
+ :type modname: str
+ """
+ if not is_module_path(self.linter.current_file):
+ return
+
+ if modname == 'ansible.module_utils' or modname.startswith('ansible.module_utils.'):
+ return
+
+ if modname == 'ansible' or modname.startswith('ansible.'):
+ self.add_message(self.BAD_MODULE_IMPORT, args=(modname,), node=node)
+
+
+def register(linter):
+ """required method to auto register this checker """
+ linter.register_checker(AnsibleUnwantedChecker(linter)) diff --git a/test/lib/ansible_test/_data/sanity/rstcheck/ignore-substitutions.txt b/test/lib/ansible_test/_data/sanity/rstcheck/ignore-substitutions.txt new file mode 100644 index 00000000..961e9bd9 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/rstcheck/ignore-substitutions.txt @@ -0,0 +1,5 @@
+version
+release
+today
+br
+_ diff --git a/test/lib/ansible_test/_data/sanity/shellcheck/exclude.txt b/test/lib/ansible_test/_data/sanity/shellcheck/exclude.txt new file mode 100644 index 00000000..29588ddd --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/shellcheck/exclude.txt @@ -0,0 +1,3 @@
+SC1090
+SC1091
+SC2164 diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/main.py b/test/lib/ansible_test/_data/sanity/validate-modules/main.py new file mode 100755 index 00000000..c1e2bdaa --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/validate-modules/main.py @@ -0,0 +1,8 @@
+#!/usr/bin/env python
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from validate_modules.main import main
+
+if __name__ == '__main__':
+ main() diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate-modules b/test/lib/ansible_test/_data/sanity/validate-modules/validate-modules new file mode 120000 index 00000000..11a5d8e1 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/validate-modules/validate-modules @@ -0,0 +1 @@
+main.py
\ No newline at end of file diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/__init__.py b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/__init__.py new file mode 100644 index 00000000..d8ff2dc0 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2015 Matt Martz <matt@sivel.net> +# Copyright (C) 2015 Rackspace US, Inc. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type +__version__ = '0.0.1b' diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/main.py b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/main.py new file mode 100644 index 00000000..e7379288 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/main.py @@ -0,0 +1,2444 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2015 Matt Martz <matt@sivel.net> +# Copyright (C) 2015 Rackspace US, Inc. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. 
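+# The validate-modules sanity test: Reporter below accumulates per-file errors
+# and warnings while ModuleValidator runs the individual checks defined below.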
+from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import abc +import argparse +import ast +import datetime +import json +import errno +import os +import re +import subprocess +import sys +import tempfile +import traceback + +from collections import OrderedDict +from contextlib import contextmanager +from distutils.version import StrictVersion, LooseVersion +from fnmatch import fnmatch + +import yaml + +from ansible import __version__ as ansible_version +from ansible.executor.module_common import REPLACER_WINDOWS +from ansible.module_utils.common._collections_compat import Mapping +from ansible.module_utils._text import to_native +from ansible.plugins.loader import fragment_loader +from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder +from ansible.utils.plugin_docs import BLACKLIST, add_collection_to_versions_and_dates, add_fragments, get_docstring +from ansible.utils.version import SemanticVersion + +from .module_args import AnsibleModuleImportError, AnsibleModuleNotInitialized, get_argument_spec + +from .schema import ansible_module_kwargs_schema, doc_schema, return_schema + +from .utils import CaptureStd, NoArgsAnsibleModule, compare_unordered_lists, is_empty, parse_yaml, parse_isodate +from voluptuous.humanize import humanize_error + +from ansible.module_utils.six import PY3, with_metaclass, string_types + +if PY3: + # Because there is no ast.TryExcept in Python 3 ast module + TRY_EXCEPT = ast.Try + # REPLACER_WINDOWS from ansible.executor.module_common is byte + # string but we need unicode for Python 3 + REPLACER_WINDOWS = REPLACER_WINDOWS.decode('utf-8') +else: + TRY_EXCEPT = ast.TryExcept + +BLACKLIST_DIRS = frozenset(('.git', 'test', '.github', '.idea')) +INDENT_REGEX = re.compile(r'([\t]*)') +TYPE_REGEX = re.compile(r'.*(if|or)(\s+[^"\']*|\s+)(?<!_)(?<!str\()type\([^)].*') +SYS_EXIT_REGEX = re.compile(r'[^#]*sys.exit\s*\(.*') +BLACKLIST_IMPORTS = { + 'requests': { + 'new_only': True, + 'error': { + 'code': 'use-module-utils-urls', + 'msg': ('requests import found, should use ' + 'ansible.module_utils.urls instead') + } + }, + r'boto(?:\.|$)': { + 'new_only': True, + 'error': { + 'code': 'use-boto3', + 'msg': 'boto import found, new modules should use boto3' + } + }, +} +SUBPROCESS_REGEX = re.compile(r'subprocess\.Po.*') +OS_CALL_REGEX = re.compile(r'os\.call.*') + + +LOOSE_ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version.split('.')[:3])) + + +def compare_dates(d1, d2): + try: + date1 = parse_isodate(d1, allow_date=True) + date2 = parse_isodate(d2, allow_date=True) + return date1 == date2 + except ValueError: + # At least one of d1 and d2 cannot be parsed. Simply compare values. 
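+ # e.g. comparing '2019-01-01' with 'unknown' falls back to plain string equality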
+ return d1 == d2 + + +class ReporterEncoder(json.JSONEncoder): + def default(self, o): + if isinstance(o, Exception): + return str(o) + + return json.JSONEncoder.default(self, o) + + +class Reporter: + def __init__(self): + self.files = OrderedDict() + + def _ensure_default_entry(self, path): + try: + self.files[path] + except KeyError: + self.files[path] = { + 'errors': [], + 'warnings': [], + 'traces': [], + 'warning_traces': [] + } + + def _log(self, path, code, msg, level='error', line=0, column=0): + self._ensure_default_entry(path) + lvl_dct = self.files[path]['%ss' % level] + lvl_dct.append({ + 'code': code, + 'msg': msg, + 'line': line, + 'column': column + }) + + def error(self, *args, **kwargs): + self._log(*args, level='error', **kwargs) + + def warning(self, *args, **kwargs): + self._log(*args, level='warning', **kwargs) + + def trace(self, path, tracebk): + self._ensure_default_entry(path) + self.files[path]['traces'].append(tracebk) + + def warning_trace(self, path, tracebk): + self._ensure_default_entry(path) + self.files[path]['warning_traces'].append(tracebk) + + @staticmethod + @contextmanager + def _output_handle(output): + if output != '-': + handle = open(output, 'w+') + else: + handle = sys.stdout + + yield handle + + handle.flush() + handle.close() + + @staticmethod + def _filter_out_ok(reports): + temp_reports = OrderedDict() + for path, report in reports.items(): + if report['errors'] or report['warnings']: + temp_reports[path] = report + + return temp_reports + + def plain(self, warnings=False, output='-'): + """Print out the test results in plain format + + output is ignored here for now + """ + ret = [] + + for path, report in Reporter._filter_out_ok(self.files).items(): + traces = report['traces'][:] + if warnings and report['warnings']: + traces.extend(report['warning_traces']) + + for trace in traces: + print('TRACE:') + print('\n '.join((' %s' % trace).splitlines())) + for error in report['errors']: + error['path'] = path + print('%(path)s:%(line)d:%(column)d: E%(code)s %(msg)s' % error) + ret.append(1) + if warnings: + for warning in report['warnings']: + warning['path'] = path + print('%(path)s:%(line)d:%(column)d: W%(code)s %(msg)s' % warning) + + return 3 if ret else 0 + + def json(self, warnings=False, output='-'): + """Print out the test results in json format + + warnings is not respected in this output + """ + ret = [len(r['errors']) for r in self.files.values()] + + with Reporter._output_handle(output) as handle: + print(json.dumps(Reporter._filter_out_ok(self.files), indent=4, cls=ReporterEncoder), file=handle) + + return 3 if sum(ret) else 0 + + +class Validator(with_metaclass(abc.ABCMeta, object)): + """Validator instances are intended to be run on a single object. 
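Problems found are recorded on the reporter passed in at construction time. 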
If you
+ are scanning multiple objects for problems, you'll want to have a separate
+ Validator for each one."""
+
+ def __init__(self, reporter=None):
+ self.reporter = reporter
+
+ @abc.abstractproperty
+ def object_name(self):
+ """Name of the object we validated"""
+ pass
+
+ @abc.abstractproperty
+ def object_path(self):
+ """Path of the object we validated"""
+ pass
+
+ @abc.abstractmethod
+ def validate(self):
+ """Run this method to generate the test results"""
+ pass
+
+
+class ModuleValidator(Validator):
+ BLACKLIST_PATTERNS = ('.git*', '*.pyc', '*.pyo', '.*', '*.md', '*.rst', '*.txt')
+ BLACKLIST_FILES = frozenset(('.git', '.gitignore', '.travis.yml',
+ 'shippable.yml',
+ '.gitattributes', '.gitmodules', 'COPYING',
+ '__init__.py', 'VERSION', 'test-docs.sh'))
+ BLACKLIST = BLACKLIST_FILES.union(BLACKLIST['MODULE'])
+
+ PS_DOC_BLACKLIST = frozenset((
+ 'async_status.ps1',
+ 'slurp.ps1',
+ 'setup.ps1'
+ ))
+ PS_ARG_VALIDATE_BLACKLIST = frozenset((
+ 'win_dsc.ps1', # win_dsc is a dynamic arg spec, the docs won't ever match
+ ))
+
+ WHITELIST_FUTURE_IMPORTS = frozenset(('absolute_import', 'division', 'print_function'))
+
+ def __init__(self, path, analyze_arg_spec=False, collection=None, collection_version=None,
+ base_branch=None, git_cache=None, reporter=None, routing=None):
+ super(ModuleValidator, self).__init__(reporter=reporter or Reporter())
+
+ self.path = path
+ self.basename = os.path.basename(self.path)
+ self.name = os.path.splitext(self.basename)[0]
+
+ self.analyze_arg_spec = analyze_arg_spec
+
+ self._Version = LooseVersion
+ self._StrictVersion = StrictVersion
+
+ self.collection = collection
+ self.collection_name = 'ansible.builtin'
+ if self.collection:
+ self._Version = SemanticVersion
+ self._StrictVersion = SemanticVersion
+ collection_namespace_path, collection_name = os.path.split(self.collection)
+ self.collection_name = '%s.%s' % (os.path.basename(collection_namespace_path), collection_name)
+ self.routing = routing
+ self.collection_version = None
+ if collection_version is not None:
+ self.collection_version_str = collection_version
+ self.collection_version = SemanticVersion(collection_version)
+
+ self.base_branch = base_branch
+ self.git_cache = git_cache or GitCache()
+
+ self._python_module_override = False
+
+ with open(path) as f:
+ self.text = f.read()
+ self.length = len(self.text.splitlines())
+ try:
+ self.ast = ast.parse(self.text)
+ except Exception:
+ self.ast = None
+
+ if base_branch:
+ self.base_module = self._get_base_file()
+ else:
+ self.base_module = None
+
+ def _create_version(self, v, collection_name=None):
+ if not v:
+ raise ValueError('Empty string is not a valid version')
+ if collection_name == 'ansible.builtin':
+ return LooseVersion(v)
+ if collection_name is not None:
+ return SemanticVersion(v)
+ return self._Version(v)
+
+ def _create_strict_version(self, v, collection_name=None):
+ if not v:
+ raise ValueError('Empty string is not a valid version')
+ if collection_name == 'ansible.builtin':
+ return StrictVersion(v)
+ if collection_name is not None:
+ return SemanticVersion(v)
+ return self._StrictVersion(v)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ if not self.base_module:
+ return
+
+ try:
+ os.remove(self.base_module)
+ except Exception:
+ pass
+
+ @property
+ def object_name(self):
+ return self.basename
+
+ @property
+ def object_path(self):
+ return self.path
+
+ def _get_collection_meta(self):
+ """Implement if we need this for version_added comparisons
+ """
+ pass + + def _python_module(self): + if self.path.endswith('.py') or self._python_module_override: + return True + return False + + def _powershell_module(self): + if self.path.endswith('.ps1'): + return True + return False + + def _just_docs(self): + """Module can contain just docs and from __future__ boilerplate + """ + try: + for child in self.ast.body: + if not isinstance(child, ast.Assign): + # allowed from __future__ imports + if isinstance(child, ast.ImportFrom) and child.module == '__future__': + for future_import in child.names: + if future_import.name not in self.WHITELIST_FUTURE_IMPORTS: + break + else: + continue + return False + return True + except AttributeError: + return False + + def _get_base_branch_module_path(self): + """List all paths within lib/ansible/modules to try and match a moved module""" + return self.git_cache.base_module_paths.get(self.object_name) + + def _has_alias(self): + """Return true if the module has any aliases.""" + return self.object_name in self.git_cache.head_aliased_modules + + def _get_base_file(self): + # In case of module moves, look for the original location + base_path = self._get_base_branch_module_path() + + command = ['git', 'show', '%s:%s' % (self.base_branch, base_path or self.path)] + p = subprocess.Popen(command, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout, stderr = p.communicate() + if int(p.returncode) != 0: + return None + + t = tempfile.NamedTemporaryFile(delete=False) + t.write(stdout) + t.close() + + return t.name + + def _is_new_module(self): + if self._has_alias(): + return False + + return not self.object_name.startswith('_') and bool(self.base_branch) and not bool(self.base_module) + + def _check_interpreter(self, powershell=False): + if powershell: + if not self.text.startswith('#!powershell\n'): + self.reporter.error( + path=self.object_path, + code='missing-powershell-interpreter', + msg='Interpreter line is not "#!powershell"' + ) + return + + if not self.text.startswith('#!/usr/bin/python'): + self.reporter.error( + path=self.object_path, + code='missing-python-interpreter', + msg='Interpreter line is not "#!/usr/bin/python"', + ) + + def _check_type_instead_of_isinstance(self, powershell=False): + if powershell: + return + for line_no, line in enumerate(self.text.splitlines()): + typekeyword = TYPE_REGEX.match(line) + if typekeyword: + # TODO: add column + self.reporter.error( + path=self.object_path, + code='unidiomatic-typecheck', + msg=('Type comparison using type() found. ' + 'Use isinstance() instead'), + line=line_no + 1 + ) + + def _check_for_sys_exit(self): + # Optimize out the happy path + if 'sys.exit' not in self.text: + return + + for line_no, line in enumerate(self.text.splitlines()): + sys_exit_usage = SYS_EXIT_REGEX.match(line) + if sys_exit_usage: + # TODO: add column + self.reporter.error( + path=self.object_path, + code='use-fail-json-not-sys-exit', + msg='sys.exit() call found. 
Should be exit_json/fail_json', + line=line_no + 1 + ) + + def _check_gpl3_header(self): + header = '\n'.join(self.text.split('\n')[:20]) + if ('GNU General Public License' not in header or + ('version 3' not in header and 'v3.0' not in header)): + self.reporter.error( + path=self.object_path, + code='missing-gplv3-license', + msg='GPLv3 license header not found in the first 20 lines of the module' + ) + elif self._is_new_module(): + if len([line for line in header + if 'GNU General Public License' in line]) > 1: + self.reporter.error( + path=self.object_path, + code='use-short-gplv3-license', + msg='Found old style GPLv3 license header: ' + 'https://docs.ansible.com/ansible/devel/dev_guide/developing_modules_documenting.html#copyright' + ) + + def _check_for_subprocess(self): + for child in self.ast.body: + if isinstance(child, ast.Import): + if child.names[0].name == 'subprocess': + for line_no, line in enumerate(self.text.splitlines()): + sp_match = SUBPROCESS_REGEX.search(line) + if sp_match: + self.reporter.error( + path=self.object_path, + code='use-run-command-not-popen', + msg=('subprocess.Popen call found. Should be module.run_command'), + line=(line_no + 1), + column=(sp_match.span()[0] + 1) + ) + + def _check_for_os_call(self): + if 'os.call' in self.text: + for line_no, line in enumerate(self.text.splitlines()): + os_call_match = OS_CALL_REGEX.search(line) + if os_call_match: + self.reporter.error( + path=self.object_path, + code='use-run-command-not-os-call', + msg=('os.call() call found. Should be module.run_command'), + line=(line_no + 1), + column=(os_call_match.span()[0] + 1) + ) + + def _find_blacklist_imports(self): + for child in self.ast.body: + names = [] + if isinstance(child, ast.Import): + names.extend(child.names) + elif isinstance(child, TRY_EXCEPT): + bodies = child.body + for handler in child.handlers: + bodies.extend(handler.body) + for grandchild in bodies: + if isinstance(grandchild, ast.Import): + names.extend(grandchild.names) + for name in names: + # TODO: Add line/col + for blacklist_import, options in BLACKLIST_IMPORTS.items(): + if re.search(blacklist_import, name.name): + new_only = options['new_only'] + if self._is_new_module() and new_only: + self.reporter.error( + path=self.object_path, + **options['error'] + ) + elif not new_only: + self.reporter.error( + path=self.object_path, + **options['error'] + ) + + def _find_module_utils(self, main): + linenos = [] + found_basic = False + for child in self.ast.body: + if isinstance(child, (ast.Import, ast.ImportFrom)): + names = [] + try: + names.append(child.module) + if child.module.endswith('.basic'): + found_basic = True + except AttributeError: + pass + names.extend([n.name for n in child.names]) + + if [n for n in names if n.startswith('ansible.module_utils')]: + linenos.append(child.lineno) + + for name in child.names: + if ('module_utils' in getattr(child, 'module', '') and + isinstance(name, ast.alias) and + name.name == '*'): + msg = ( + 'module-utils-specific-import', + ('module_utils imports should import specific ' + 'components, not "*"') + ) + if self._is_new_module(): + self.reporter.error( + path=self.object_path, + code=msg[0], + msg=msg[1], + line=child.lineno + ) + else: + self.reporter.warning( + path=self.object_path, + code=msg[0], + msg=msg[1], + line=child.lineno + ) + + if (isinstance(name, ast.alias) and + name.name == 'basic'): + found_basic = True + + if not found_basic: + self.reporter.warning( + path=self.object_path, + code='missing-module-utils-basic-import', + msg='Did not 
find "ansible.module_utils.basic" import' + ) + + return linenos + + def _get_first_callable(self): + linenos = [] + for child in self.ast.body: + if isinstance(child, (ast.FunctionDef, ast.ClassDef)): + linenos.append(child.lineno) + + return min(linenos) + + def _find_main_call(self, look_for="main"): + """ Ensure that the module ends with: + if __name__ == '__main__': + main() + OR, in the case of modules that are in the docs-only deprecation phase + if __name__ == '__main__': + removed_module() + """ + lineno = False + if_bodies = [] + for child in self.ast.body: + if isinstance(child, ast.If): + try: + if child.test.left.id == '__name__': + if_bodies.extend(child.body) + except AttributeError: + pass + + bodies = self.ast.body + bodies.extend(if_bodies) + + for child in bodies: + + # validate that the next to last line is 'if __name__ == "__main__"' + if child.lineno == (self.length - 1): + + mainchecked = False + try: + if isinstance(child, ast.If) and \ + child.test.left.id == '__name__' and \ + len(child.test.ops) == 1 and \ + isinstance(child.test.ops[0], ast.Eq) and \ + child.test.comparators[0].s == '__main__': + mainchecked = True + except Exception: + pass + + if not mainchecked: + self.reporter.error( + path=self.object_path, + code='missing-if-name-main', + msg='Next to last line should be: if __name__ == "__main__":', + line=child.lineno + ) + + # validate that the final line is a call to main() + if isinstance(child, ast.Expr): + if isinstance(child.value, ast.Call): + if (isinstance(child.value.func, ast.Name) and + child.value.func.id == look_for): + lineno = child.lineno + if lineno < self.length - 1: + self.reporter.error( + path=self.object_path, + code='last-line-main-call', + msg=('Call to %s() not the last line' % look_for), + line=lineno + ) + + if not lineno: + self.reporter.error( + path=self.object_path, + code='missing-main-call', + msg=('Did not find a call to %s()' % look_for) + ) + + return lineno or 0 + + def _find_has_import(self): + for child in self.ast.body: + found_try_except_import = False + found_has = False + if isinstance(child, TRY_EXCEPT): + bodies = child.body + for handler in child.handlers: + bodies.extend(handler.body) + for grandchild in bodies: + if isinstance(grandchild, ast.Import): + found_try_except_import = True + if isinstance(grandchild, ast.Assign): + for target in grandchild.targets: + if not isinstance(target, ast.Name): + continue + if target.id.lower().startswith('has_'): + found_has = True + if found_try_except_import and not found_has: + # TODO: Add line/col + self.reporter.warning( + path=self.object_path, + code='try-except-missing-has', + msg='Found Try/Except block without HAS_ assignment' + ) + + def _ensure_imports_below_docs(self, doc_info, first_callable): + try: + min_doc_line = min( + [doc_info[key]['lineno'] for key in doc_info if doc_info[key]['lineno']] + ) + except ValueError: + # We can't perform this validation, as there are no DOCs provided at all + return + + max_doc_line = max( + [doc_info[key]['end_lineno'] for key in doc_info if doc_info[key]['end_lineno']] + ) + + import_lines = [] + + for child in self.ast.body: + if isinstance(child, (ast.Import, ast.ImportFrom)): + if isinstance(child, ast.ImportFrom) and child.module == '__future__': + # allowed from __future__ imports + for future_import in child.names: + if future_import.name not in self.WHITELIST_FUTURE_IMPORTS: + self.reporter.error( + path=self.object_path, + code='illegal-future-imports', + msg=('Only the following from __future__ imports are 
allowed: %s'
+ % ', '.join(self.WHITELIST_FUTURE_IMPORTS)),
+ line=child.lineno
+ )
+ break
+ else: # for-else. If we didn't find a problem and break out of the loop, then this is a legal import
+ continue
+ import_lines.append(child.lineno)
+ if child.lineno < min_doc_line:
+ self.reporter.error(
+ path=self.object_path,
+ code='import-before-documentation',
+ msg=('Import found before documentation variables. '
+ 'All imports must appear below '
+ 'DOCUMENTATION/EXAMPLES/RETURN.'),
+ line=child.lineno
+ )
+ break
+ elif isinstance(child, TRY_EXCEPT):
+ bodies = child.body
+ for handler in child.handlers:
+ bodies.extend(handler.body)
+ for grandchild in bodies:
+ if isinstance(grandchild, (ast.Import, ast.ImportFrom)):
+ import_lines.append(grandchild.lineno)
+ if grandchild.lineno < min_doc_line:
+ self.reporter.error(
+ path=self.object_path,
+ code='import-before-documentation',
+ msg=('Import found before documentation '
+ 'variables. All imports must appear below '
+ 'DOCUMENTATION/EXAMPLES/RETURN.'),
+ line=child.lineno
+ )
+ break
+
+ for import_line in import_lines:
+ if not (max_doc_line < import_line < first_callable):
+ msg = (
+ 'import-placement',
+ ('Imports should be directly below DOCUMENTATION/EXAMPLES/'
+ 'RETURN.')
+ )
+ if self._is_new_module():
+ self.reporter.error(
+ path=self.object_path,
+ code=msg[0],
+ msg=msg[1],
+ line=import_line
+ )
+ else:
+ self.reporter.warning(
+ path=self.object_path,
+ code=msg[0],
+ msg=msg[1],
+ line=import_line
+ )
+
+ def _validate_ps_replacers(self):
+ # loop all (for/else + error)
+ # get module list for each
+ # check "shape" of each module name
+
+ module_requires = r'(?im)^#\s*requires\s+\-module(?:s?)\s*(Ansible\.ModuleUtils\..+)'
+ csharp_requires = r'(?im)^#\s*ansiblerequires\s+\-csharputil\s*(Ansible\..+)'
+ found_requires = False
+
+ for req_stmt in re.finditer(module_requires, self.text):
+ found_requires = True
+ # this will bomb on dictionary format - "don't do that"
+ module_list = [x.strip() for x in req_stmt.group(1).split(',')]
+ if len(module_list) > 1:
+ self.reporter.error(
+ path=self.object_path,
+ code='multiple-utils-per-requires',
+ msg='Ansible.ModuleUtils requirements do not support multiple modules per statement: "%s"' % req_stmt.group(0)
+ )
+ continue
+
+ module_name = module_list[0]
+
+ if module_name.lower().endswith('.psm1'):
+ self.reporter.error(
+ path=self.object_path,
+ code='invalid-requires-extension',
+ msg='Module #Requires should not end in .psm1: "%s"' % module_name
+ )
+
+ for req_stmt in re.finditer(csharp_requires, self.text):
+ found_requires = True
+ # this will bomb on dictionary format - "don't do that"
+ module_list = [x.strip() for x in req_stmt.group(1).split(',')]
+ if len(module_list) > 1:
+ self.reporter.error(
+ path=self.object_path,
+ code='multiple-csharp-utils-per-requires',
+ msg='Ansible C# util requirements do not support multiple utils per statement: "%s"' % req_stmt.group(0)
+ )
+ continue
+
+ module_name = module_list[0]
+
+ if module_name.lower().endswith('.cs'):
+ self.reporter.error(
+ path=self.object_path,
+ code='illegal-extension-cs',
+ msg='Module #AnsibleRequires -CSharpUtil should not end in .cs: "%s"' % module_name
+ )
+
+ # also accept the legacy #POWERSHELL_COMMON replacer signal
+ if not found_requires and REPLACER_WINDOWS not in self.text:
+ self.reporter.error(
+ path=self.object_path,
+ code='missing-module-utils-import-csharp-requirements',
+ msg='No Ansible.ModuleUtils or C# Ansible util requirements/imports found'
+ )
+
+ def 
_find_ps_docs_py_file(self): + if self.object_name in self.PS_DOC_BLACKLIST: + return + py_path = self.path.replace('.ps1', '.py') + if not os.path.isfile(py_path): + self.reporter.error( + path=self.object_path, + code='missing-python-doc', + msg='Missing python documentation file' + ) + return py_path + + def _get_docs(self): + docs = { + 'DOCUMENTATION': { + 'value': None, + 'lineno': 0, + 'end_lineno': 0, + }, + 'EXAMPLES': { + 'value': None, + 'lineno': 0, + 'end_lineno': 0, + }, + 'RETURN': { + 'value': None, + 'lineno': 0, + 'end_lineno': 0, + }, + } + for child in self.ast.body: + if isinstance(child, ast.Assign): + for grandchild in child.targets: + if not isinstance(grandchild, ast.Name): + continue + + if grandchild.id == 'DOCUMENTATION': + docs['DOCUMENTATION']['value'] = child.value.s + docs['DOCUMENTATION']['lineno'] = child.lineno + docs['DOCUMENTATION']['end_lineno'] = ( + child.lineno + len(child.value.s.splitlines()) + ) + elif grandchild.id == 'EXAMPLES': + docs['EXAMPLES']['value'] = child.value.s + docs['EXAMPLES']['lineno'] = child.lineno + docs['EXAMPLES']['end_lineno'] = ( + child.lineno + len(child.value.s.splitlines()) + ) + elif grandchild.id == 'RETURN': + docs['RETURN']['value'] = child.value.s + docs['RETURN']['lineno'] = child.lineno + docs['RETURN']['end_lineno'] = ( + child.lineno + len(child.value.s.splitlines()) + ) + + return docs + + def _validate_docs_schema(self, doc, schema, name, error_code): + # TODO: Add line/col + errors = [] + try: + schema(doc) + except Exception as e: + for error in e.errors: + error.data = doc + errors.extend(e.errors) + + for error in errors: + path = [str(p) for p in error.path] + + local_error_code = getattr(error, 'ansible_error_code', error_code) + + if isinstance(error.data, dict): + error_message = humanize_error(error.data, error) + else: + error_message = error + + if path: + combined_path = '%s.%s' % (name, '.'.join(path)) + else: + combined_path = name + + self.reporter.error( + path=self.object_path, + code=local_error_code, + msg='%s: %s' % (combined_path, error_message) + ) + + def _validate_docs(self): + doc_info = self._get_docs() + doc = None + documentation_exists = False + examples_exist = False + returns_exist = False + # We have three ways of marking deprecated/removed files. 
Have to check each one + # individually and then make sure they all agree + filename_deprecated_or_removed = False + deprecated = False + removed = False + doc_deprecated = None # doc legally might not exist + routing_says_deprecated = False + + if self.object_name.startswith('_') and not os.path.islink(self.object_path): + filename_deprecated_or_removed = True + + # We are testing a collection + if self.routing: + routing_deprecation = self.routing.get('plugin_routing', {}).get('modules', {}).get(self.name, {}).get('deprecation', {}) + if routing_deprecation: + # meta/runtime.yml says this is deprecated + routing_says_deprecated = True + deprecated = True + + if not removed: + if not bool(doc_info['DOCUMENTATION']['value']): + self.reporter.error( + path=self.object_path, + code='missing-documentation', + msg='No DOCUMENTATION provided' + ) + else: + documentation_exists = True + doc, errors, traces = parse_yaml( + doc_info['DOCUMENTATION']['value'], + doc_info['DOCUMENTATION']['lineno'], + self.name, 'DOCUMENTATION' + ) + if doc: + add_collection_to_versions_and_dates(doc, self.collection_name, is_module=True) + for error in errors: + self.reporter.error( + path=self.object_path, + code='documentation-syntax-error', + **error + ) + for trace in traces: + self.reporter.trace( + path=self.object_path, + tracebk=trace + ) + if not errors and not traces: + missing_fragment = False + with CaptureStd(): + try: + get_docstring(self.path, fragment_loader, verbose=True, + collection_name=self.collection_name, is_module=True) + except AssertionError: + fragment = doc['extends_documentation_fragment'] + self.reporter.error( + path=self.object_path, + code='missing-doc-fragment', + msg='DOCUMENTATION fragment missing: %s' % fragment + ) + missing_fragment = True + except Exception as e: + self.reporter.trace( + path=self.object_path, + tracebk=traceback.format_exc() + ) + self.reporter.error( + path=self.object_path, + code='documentation-error', + msg='Unknown DOCUMENTATION error, see TRACE: %s' % e + ) + + if not missing_fragment: + add_fragments(doc, self.object_path, fragment_loader=fragment_loader, is_module=True) + + if 'options' in doc and doc['options'] is None: + self.reporter.error( + path=self.object_path, + code='invalid-documentation-options', + msg='DOCUMENTATION.options must be a dictionary/hash when used', + ) + + if 'deprecated' in doc and doc.get('deprecated'): + doc_deprecated = True + doc_deprecation = doc['deprecated'] + documentation_collection = doc_deprecation.get('removed_from_collection') + if documentation_collection != self.collection_name: + self.reporter.error( + path=self.object_path, + code='deprecation-wrong-collection', + msg='"DOCUMENTATION.deprecation.removed_from_collection must be the current collection name: %r vs. 
%r' % (
+                                documentation_collection, self.collection_name)
+                        )
+                    else:
+                        doc_deprecated = False
+
+                    if os.path.islink(self.object_path):
+                        # This module has an alias, which we can tell as it's a symlink
+                        # Rather than checking for `module: $filename` we need to check against the true filename
+                        self._validate_docs_schema(
+                            doc,
+                            doc_schema(
+                                os.readlink(self.object_path).split('.')[0],
+                                for_collection=bool(self.collection),
+                                deprecated_module=deprecated,
+                            ),
+                            'DOCUMENTATION',
+                            'invalid-documentation',
+                        )
+                    else:
+                        # This is the normal case
+                        self._validate_docs_schema(
+                            doc,
+                            doc_schema(
+                                self.object_name.split('.')[0],
+                                for_collection=bool(self.collection),
+                                deprecated_module=deprecated,
+                            ),
+                            'DOCUMENTATION',
+                            'invalid-documentation',
+                        )
+
+                    if not self.collection:
+                        existing_doc = self._check_for_new_args(doc)
+                        self._check_version_added(doc, existing_doc)
+
+            if not bool(doc_info['EXAMPLES']['value']):
+                self.reporter.error(
+                    path=self.object_path,
+                    code='missing-examples',
+                    msg='No EXAMPLES provided'
+                )
+            else:
+                _doc, errors, traces = parse_yaml(doc_info['EXAMPLES']['value'],
+                                                  doc_info['EXAMPLES']['lineno'],
+                                                  self.name, 'EXAMPLES', load_all=True)
+                for error in errors:
+                    self.reporter.error(
+                        path=self.object_path,
+                        code='invalid-examples',
+                        **error
+                    )
+                for trace in traces:
+                    self.reporter.trace(
+                        path=self.object_path,
+                        tracebk=trace
+                    )
+
+            if not bool(doc_info['RETURN']['value']):
+                if self._is_new_module():
+                    self.reporter.error(
+                        path=self.object_path,
+                        code='missing-return',
+                        msg='No RETURN provided'
+                    )
+                else:
+                    self.reporter.warning(
+                        path=self.object_path,
+                        code='missing-return-legacy',
+                        msg='No RETURN provided'
+                    )
+            else:
+                data, errors, traces = parse_yaml(doc_info['RETURN']['value'],
+                                                  doc_info['RETURN']['lineno'],
+                                                  self.name, 'RETURN')
+                if data:
+                    add_collection_to_versions_and_dates(data, self.collection_name, is_module=True, return_docs=True)
+                self._validate_docs_schema(data, return_schema(for_collection=bool(self.collection)),
+                                           'RETURN', 'return-syntax-error')
+
+                for error in errors:
+                    self.reporter.error(
+                        path=self.object_path,
+                        code='return-syntax-error',
+                        **error
+                    )
+                for trace in traces:
+                    self.reporter.trace(
+                        path=self.object_path,
+                        tracebk=trace
+                    )
+
+        # Check for mismatched deprecation
+        if not self.collection:
+            mismatched_deprecation = True
+            if not (filename_deprecated_or_removed or removed or deprecated or doc_deprecated):
+                mismatched_deprecation = False
+            else:
+                if (filename_deprecated_or_removed and deprecated and doc_deprecated):
+                    mismatched_deprecation = False
+                if (filename_deprecated_or_removed and removed and not (documentation_exists or examples_exist or returns_exist)):
+                    mismatched_deprecation = False
+
+            if mismatched_deprecation:
+                self.reporter.error(
+                    path=self.object_path,
+                    code='deprecation-mismatch',
+                    msg='Module deprecation/removed must agree in documentation, by prepending filename with'
+                        ' "_", and setting DOCUMENTATION.deprecated for deprecation or by removing all'
+                        ' documentation for removed'
+                )
+        else:
+            # We are testing a collection
+            if self.object_name.startswith('_'):
+                self.reporter.error(
+                    path=self.object_path,
+                    code='collections-no-underscore-on-deprecation',
+                    msg='Deprecated content in collections MUST NOT start with "_", update meta/runtime.yml instead',
+                )
+
+            if not (doc_deprecated == routing_says_deprecated):
+                # DOCUMENTATION.deprecated and meta/runtime.yml disagree
+                self.reporter.error(
+                    path=self.object_path,
+                    code='deprecation-mismatch',
msg='"meta/runtime.yml" and DOCUMENTATION.deprecation do not agree.' + ) + elif routing_says_deprecated: + # Both DOCUMENTATION.deprecated and meta/runtime.yml agree that the module is deprecated. + # Make sure they give the same version or date. + routing_date = routing_deprecation.get('removal_date') + routing_version = routing_deprecation.get('removal_version') + # The versions and dates in the module documentation are auto-tagged, so remove the tag + # to make comparison possible and to avoid confusing the user. + documentation_date = doc_deprecation.get('removed_at_date') + documentation_version = doc_deprecation.get('removed_in') + if not compare_dates(routing_date, documentation_date): + self.reporter.error( + path=self.object_path, + code='deprecation-mismatch', + msg='"meta/runtime.yml" and DOCUMENTATION.deprecation do not agree on removal date: %r vs. %r' % ( + routing_date, documentation_date) + ) + if routing_version != documentation_version: + self.reporter.error( + path=self.object_path, + code='deprecation-mismatch', + msg='"meta/runtime.yml" and DOCUMENTATION.deprecation do not agree on removal version: %r vs. %r' % ( + routing_version, documentation_version) + ) + + # In the future we should error if ANSIBLE_METADATA exists in a collection + + return doc_info, doc + + def _check_version_added(self, doc, existing_doc): + version_added_raw = doc.get('version_added') + try: + collection_name = doc.get('version_added_collection') + version_added = self._create_strict_version( + str(version_added_raw or '0.0'), + collection_name=collection_name) + except ValueError as e: + version_added = version_added_raw or '0.0' + if self._is_new_module() or version_added != 'historical': + # already reported during schema validation, except: + if version_added == 'historical': + self.reporter.error( + path=self.object_path, + code='module-invalid-version-added', + msg='version_added is not a valid version number: %r. Error: %s' % (version_added, e) + ) + return + + if existing_doc and str(version_added_raw) != str(existing_doc.get('version_added')): + self.reporter.error( + path=self.object_path, + code='module-incorrect-version-added', + msg='version_added should be %r. Currently %r' % (existing_doc.get('version_added'), version_added_raw) + ) + + if not self._is_new_module(): + return + + should_be = '.'.join(ansible_version.split('.')[:2]) + strict_ansible_version = self._create_strict_version(should_be, collection_name='ansible.builtin') + + if (version_added < strict_ansible_version or + strict_ansible_version < version_added): + self.reporter.error( + path=self.object_path, + code='module-incorrect-version-added', + msg='version_added should be %r. 
Currently %r' % (should_be, version_added_raw)
+            )
+
+    def _validate_ansible_module_call(self, docs):
+        try:
+            spec, args, kwargs = get_argument_spec(self.path, self.collection)
+        except AnsibleModuleNotInitialized:
+            self.reporter.error(
+                path=self.object_path,
+                code='ansible-module-not-initialized',
+                msg="Execution of the module did not result in initialization of AnsibleModule",
+            )
+            return
+        except AnsibleModuleImportError as e:
+            self.reporter.error(
+                path=self.object_path,
+                code='import-error',
+                msg="Exception attempting to import module for argument_spec introspection, '%s'" % e
+            )
+            self.reporter.trace(
+                path=self.object_path,
+                tracebk=traceback.format_exc()
+            )
+            return
+
+        self._validate_docs_schema(kwargs, ansible_module_kwargs_schema(for_collection=bool(self.collection)),
+                                   'AnsibleModule', 'invalid-ansiblemodule-schema')
+
+        self._validate_argument_spec(docs, spec, kwargs)
+
+    def _validate_list_of_module_args(self, name, terms, spec, context):
+        if terms is None:
+            return
+        if not isinstance(terms, (list, tuple)):
+            # This is already reported by schema checking
+            return
+        for check in terms:
+            if not isinstance(check, (list, tuple)):
+                # This is already reported by schema checking
+                continue
+            bad_term = False
+            for term in check:
+                if not isinstance(term, string_types):
+                    msg = name
+                    if context:
+                        msg += " found in %s" % " -> ".join(context)
+                    msg += " must contain strings in the lists or tuples; found value %r" % (term, )
+                    self.reporter.error(
+                        path=self.object_path,
+                        code=name + '-type',
+                        msg=msg,
+                    )
+                    bad_term = True
+            if bad_term:
+                continue
+            if len(set(check)) != len(check):
+                msg = name
+                if context:
+                    msg += " found in %s" % " -> ".join(context)
+                msg += " has repeated terms"
+                self.reporter.error(
+                    path=self.object_path,
+                    code=name + '-collision',
+                    msg=msg,
+                )
+            if not set(check) <= set(spec):
+                msg = name
+                if context:
+                    msg += " found in %s" % " -> ".join(context)
+                msg += " contains terms which are not part of argument_spec: %s" % ", ".join(sorted(set(check).difference(set(spec))))
+                self.reporter.error(
+                    path=self.object_path,
+                    code=name + '-unknown',
+                    msg=msg,
+                )
+
+    def _validate_required_if(self, terms, spec, context, module):
+        if terms is None:
+            return
+        if not isinstance(terms, (list, tuple)):
+            # This is already reported by schema checking
+            return
+        for check in terms:
+            if not isinstance(check, (list, tuple)) or len(check) not in [3, 4]:
+                # This is already reported by schema checking
+                continue
+            if len(check) == 4 and not isinstance(check[3], bool):
+                msg = "required_if"
+                if context:
+                    msg += " found in %s" % " -> ".join(context)
+                msg += " must have fourth value omitted or of type bool; got %r" % (check[3], )
+                self.reporter.error(
+                    path=self.object_path,
+                    code='required_if-is_one_of-type',
+                    msg=msg,
+                )
+            requirements = check[2]
+            if not isinstance(requirements, (list, tuple)):
+                msg = "required_if"
+                if context:
+                    msg += " found in %s" % " -> ".join(context)
+                msg += " must have third value (requirements) being a list or tuple; got type %r" % (requirements, )
+                self.reporter.error(
+                    path=self.object_path,
+                    code='required_if-requirements-type',
+                    msg=msg,
+                )
+                continue
+            bad_term = False
+            for term in requirements:
+                if not isinstance(term, string_types):
+                    msg = "required_if"
+                    if context:
+                        msg += " found in %s" % " -> ".join(context)
+                    msg += " must have only strings in third value (requirements); got %r" % (term, )
+                    self.reporter.error(
+                        path=self.object_path,
+                        code='required_if-requirements-type',
+                        msg=msg,
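+                        # Shape reminder for required_if entries (descriptive note): each entry is
+                        # (key, value, requirements) or (key, value, requirements, is_one_of); when
+                        # the optional fourth element is True, any one of the listed requirements is
+                        # enough, otherwise all of them must be supplied - hence the bool check above.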
) + bad_term = True + if bad_term: + continue + if len(set(requirements)) != len(requirements): + msg = "required_if" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has repeated terms in requirements" + self.reporter.error( + path=self.object_path, + code='required_if-requirements-collision', + msg=msg, + ) + if not set(requirements) <= set(spec): + msg = "required_if" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " contains terms in requirements which are not part of argument_spec: %s" % ", ".join(sorted(set(requirements).difference(set(spec)))) + self.reporter.error( + path=self.object_path, + code='required_if-requirements-unknown', + msg=msg, + ) + key = check[0] + if key not in spec: + msg = "required_if" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " must have its key %s in argument_spec" % key + self.reporter.error( + path=self.object_path, + code='required_if-unknown-key', + msg=msg, + ) + continue + if key in requirements: + msg = "required_if" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " contains its key %s in requirements" % key + self.reporter.error( + path=self.object_path, + code='required_if-key-in-requirements', + msg=msg, + ) + value = check[1] + if value is not None: + _type = spec[key].get('type', 'str') + if callable(_type): + _type_checker = _type + else: + _type_checker = module._CHECK_ARGUMENT_TYPES_DISPATCHER.get(_type) + try: + with CaptureStd(): + dummy = _type_checker(value) + except (Exception, SystemExit): + msg = "required_if" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has value %r which does not fit to %s's parameter type %r" % (value, key, _type) + self.reporter.error( + path=self.object_path, + code='required_if-value-type', + msg=msg, + ) + + def _validate_required_by(self, terms, spec, context): + if terms is None: + return + if not isinstance(terms, Mapping): + # This is already reported by schema checking + return + for key, value in terms.items(): + if isinstance(value, string_types): + value = [value] + if not isinstance(value, (list, tuple)): + # This is already reported by schema checking + continue + for term in value: + if not isinstance(term, string_types): + # This is already reported by schema checking + continue + if len(set(value)) != len(value) or key in value: + msg = "required_by" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has repeated terms" + self.reporter.error( + path=self.object_path, + code='required_by-collision', + msg=msg, + ) + if not set(value) <= set(spec) or key not in spec: + msg = "required_by" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " contains terms which are not part of argument_spec: %s" % ", ".join(sorted(set(value).difference(set(spec)))) + self.reporter.error( + path=self.object_path, + code='required_by-unknown', + msg=msg, + ) + + def _validate_argument_spec(self, docs, spec, kwargs, context=None, last_context_spec=None): + if not self.analyze_arg_spec: + return + + if docs is None: + docs = {} + + if context is None: + context = [] + + if last_context_spec is None: + last_context_spec = kwargs + + try: + if not context: + add_fragments(docs, self.object_path, fragment_loader=fragment_loader, is_module=True) + except Exception: + # Cannot merge fragments + return + + # Use this to access type checkers later + module = NoArgsAnsibleModule({}) + + self._validate_list_of_module_args('mutually_exclusive', 
last_context_spec.get('mutually_exclusive'), spec, context) + self._validate_list_of_module_args('required_together', last_context_spec.get('required_together'), spec, context) + self._validate_list_of_module_args('required_one_of', last_context_spec.get('required_one_of'), spec, context) + self._validate_required_if(last_context_spec.get('required_if'), spec, context, module) + self._validate_required_by(last_context_spec.get('required_by'), spec, context) + + provider_args = set() + args_from_argspec = set() + deprecated_args_from_argspec = set() + doc_options = docs.get('options', {}) + if doc_options is None: + doc_options = {} + for arg, data in spec.items(): + restricted_argument_names = ('message', 'syslog_facility') + if arg.lower() in restricted_argument_names: + msg = "Argument '%s' in argument_spec " % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += "must not be one of %s as it is used " \ + "internally by Ansible Core Engine" % (",".join(restricted_argument_names)) + self.reporter.error( + path=self.object_path, + code='invalid-argument-name', + msg=msg, + ) + continue + if 'aliases' in data: + for al in data['aliases']: + if al.lower() in restricted_argument_names: + msg = "Argument alias '%s' in argument_spec " % al + if context: + msg += " found in %s" % " -> ".join(context) + msg += "must not be one of %s as it is used " \ + "internally by Ansible Core Engine" % (",".join(restricted_argument_names)) + self.reporter.error( + path=self.object_path, + code='invalid-argument-name', + msg=msg, + ) + continue + + if not isinstance(data, dict): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " must be a dictionary/hash when used" + self.reporter.error( + path=self.object_path, + code='invalid-argument-spec', + msg=msg, + ) + continue + + removed_at_date = data.get('removed_at_date', None) + if removed_at_date is not None: + try: + if parse_isodate(removed_at_date, allow_date=False) < datetime.date.today(): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has a removed_at_date '%s' before today" % removed_at_date + self.reporter.error( + path=self.object_path, + code='deprecated-date', + msg=msg, + ) + except ValueError: + # This should only happen when removed_at_date is not in ISO format. Since schema + # validation already reported this as an error, don't report it a second time. + pass + + deprecated_aliases = data.get('deprecated_aliases', None) + if deprecated_aliases is not None: + for deprecated_alias in deprecated_aliases: + if 'name' in deprecated_alias and 'date' in deprecated_alias: + try: + date = deprecated_alias['date'] + if parse_isodate(date, allow_date=False) < datetime.date.today(): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has deprecated aliases '%s' with removal date '%s' before today" % ( + deprecated_alias['name'], deprecated_alias['date']) + self.reporter.error( + path=self.object_path, + code='deprecated-date', + msg=msg, + ) + except ValueError: + # This should only happen when deprecated_alias['date'] is not in ISO format. Since + # schema validation already reported this as an error, don't report it a second + # time. 
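To make the date-based rule above concrete, here is a minimal standalone sketch of the same check; the spec entry is hypothetical and datetime.date.fromisoformat stands in for the validator's parse_isodate helper:

import datetime

def removal_date_is_past(iso_date):
    # Same rule as above: an ISO 8601 removal date before today is an error.
    return datetime.date.fromisoformat(iso_date) < datetime.date.today()

# Hypothetical argument_spec entry that would be reported as 'deprecated-date':
spec = {'token': {'type': 'str', 'removed_at_date': '2020-01-01'}}
for arg, data in spec.items():
    if 'removed_at_date' in data and removal_date_is_past(data['removed_at_date']):
        print("Argument '%s' has a removed_at_date '%s' before today" % (arg, data['removed_at_date']))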
+ pass + + has_version = False + if self.collection and self.collection_version is not None: + compare_version = self.collection_version + version_of_what = "this collection (%s)" % self.collection_version_str + code_prefix = 'collection' + has_version = True + elif not self.collection: + compare_version = LOOSE_ANSIBLE_VERSION + version_of_what = "Ansible (%s)" % ansible_version + code_prefix = 'ansible' + has_version = True + + removed_in_version = data.get('removed_in_version', None) + if removed_in_version is not None: + try: + collection_name = data.get('removed_from_collection') + removed_in = self._create_version(str(removed_in_version), collection_name=collection_name) + if has_version and collection_name == self.collection_name and compare_version >= removed_in: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has a deprecated removed_in_version %r," % removed_in_version + msg += " i.e. the version is less than or equal to the current version of %s" % version_of_what + self.reporter.error( + path=self.object_path, + code=code_prefix + '-deprecated-version', + msg=msg, + ) + except ValueError as e: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has an invalid removed_in_version number %r: %s" % (removed_in_version, e) + self.reporter.error( + path=self.object_path, + code='invalid-deprecated-version', + msg=msg, + ) + except TypeError: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has an invalid removed_in_version number %r: " % (removed_in_version, ) + msg += " error while comparing to version of %s" % version_of_what + self.reporter.error( + path=self.object_path, + code='invalid-deprecated-version', + msg=msg, + ) + + if deprecated_aliases is not None: + for deprecated_alias in deprecated_aliases: + if 'name' in deprecated_alias and 'version' in deprecated_alias: + try: + collection_name = deprecated_alias.get('collection_name') + version = self._create_version(str(deprecated_alias['version']), collection_name=collection_name) + if has_version and collection_name == self.collection_name and compare_version >= version: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has deprecated aliases '%s' with removal in version %r," % ( + deprecated_alias['name'], deprecated_alias['version']) + msg += " i.e. 
the version is less than or equal to the current version of %s" % version_of_what + self.reporter.error( + path=self.object_path, + code=code_prefix + '-deprecated-version', + msg=msg, + ) + except ValueError as e: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has deprecated aliases '%s' with invalid removal version %r: %s" % ( + deprecated_alias['name'], deprecated_alias['version'], e) + self.reporter.error( + path=self.object_path, + code='invalid-deprecated-version', + msg=msg, + ) + except TypeError: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has deprecated aliases '%s' with invalid removal version %r:" % ( + deprecated_alias['name'], deprecated_alias['version']) + msg += " error while comparing to version of %s" % version_of_what + self.reporter.error( + path=self.object_path, + code='invalid-deprecated-version', + msg=msg, + ) + + aliases = data.get('aliases', []) + if arg in aliases: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " is specified as its own alias" + self.reporter.error( + path=self.object_path, + code='parameter-alias-self', + msg=msg + ) + if len(aliases) > len(set(aliases)): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has at least one alias specified multiple times in aliases" + self.reporter.error( + path=self.object_path, + code='parameter-alias-repeated', + msg=msg + ) + if not context and arg == 'state': + bad_states = set(['list', 'info', 'get']) & set(data.get('choices', set())) + for bad_state in bad_states: + self.reporter.error( + path=self.object_path, + code='parameter-state-invalid-choice', + msg="Argument 'state' includes the value '%s' as a choice" % bad_state) + if not data.get('removed_in_version', None) and not data.get('removed_at_date', None): + args_from_argspec.add(arg) + args_from_argspec.update(aliases) + else: + deprecated_args_from_argspec.add(arg) + deprecated_args_from_argspec.update(aliases) + if arg == 'provider' and self.object_path.startswith('lib/ansible/modules/network/'): + if data.get('options') is not None and not isinstance(data.get('options'), Mapping): + self.reporter.error( + path=self.object_path, + code='invalid-argument-spec-options', + msg="Argument 'options' in argument_spec['provider'] must be a dictionary/hash when used", + ) + elif data.get('options'): + # Record provider options from network modules, for later comparison + for provider_arg, provider_data in data.get('options', {}).items(): + provider_args.add(provider_arg) + provider_args.update(provider_data.get('aliases', [])) + + if data.get('required') and data.get('default', object) != object: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " is marked as required but specifies a default. 
Arguments with a" \ + " default should not be marked as required" + self.reporter.error( + path=self.object_path, + code='no-default-for-required-parameter', + msg=msg + ) + + if arg in provider_args: + # Provider args are being removed from network module top level + # don't validate docs<->arg_spec checks below + continue + + _type = data.get('type', 'str') + if callable(_type): + _type_checker = _type + else: + _type_checker = module._CHECK_ARGUMENT_TYPES_DISPATCHER.get(_type) + + _elements = data.get('elements') + if (_type == 'list') and not _elements: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines type as list but elements is not defined" + self.reporter.error( + path=self.object_path, + code='parameter-list-no-elements', + msg=msg + ) + if _elements: + if not callable(_elements): + module._CHECK_ARGUMENT_TYPES_DISPATCHER.get(_elements) + if _type != 'list': + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines elements as %s but it is valid only when value of parameter type is list" % _elements + self.reporter.error( + path=self.object_path, + code='parameter-invalid-elements', + msg=msg + ) + + arg_default = None + if 'default' in data and not is_empty(data['default']): + try: + with CaptureStd(): + arg_default = _type_checker(data['default']) + except (Exception, SystemExit): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines default as (%r) but this is incompatible with parameter type %r" % (data['default'], _type) + self.reporter.error( + path=self.object_path, + code='incompatible-default-type', + msg=msg + ) + continue + + doc_options_args = [] + for alias in sorted(set([arg] + list(aliases))): + if alias in doc_options: + doc_options_args.append(alias) + if len(doc_options_args) == 0: + # Undocumented arguments will be handled later (search for undocumented-parameter) + doc_options_arg = {} + else: + doc_options_arg = doc_options[doc_options_args[0]] + if len(doc_options_args) > 1: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " with aliases %s is documented multiple times, namely as %s" % ( + ", ".join([("'%s'" % alias) for alias in aliases]), + ", ".join([("'%s'" % alias) for alias in doc_options_args]) + ) + self.reporter.error( + path=self.object_path, + code='parameter-documented-multiple-times', + msg=msg + ) + + try: + doc_default = None + if 'default' in doc_options_arg and not is_empty(doc_options_arg['default']): + with CaptureStd(): + doc_default = _type_checker(doc_options_arg['default']) + except (Exception, SystemExit): + msg = "Argument '%s' in documentation" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines default as (%r) but this is incompatible with parameter type %r" % (doc_options_arg.get('default'), _type) + self.reporter.error( + path=self.object_path, + code='doc-default-incompatible-type', + msg=msg + ) + continue + + if arg_default != doc_default: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines default as (%r) but documentation defines default as (%r)" % (arg_default, doc_default) + self.reporter.error( + path=self.object_path, + code='doc-default-does-not-match-spec', + msg=msg + ) + + doc_type = doc_options_arg.get('type') + if 'type' in 
data and data['type'] is not None: + if doc_type is None: + if not arg.startswith('_'): # hidden parameter, for example _raw_params + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines type as %r but documentation doesn't define type" % (data['type']) + self.reporter.error( + path=self.object_path, + code='parameter-type-not-in-doc', + msg=msg + ) + elif data['type'] != doc_type: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines type as %r but documentation defines type as %r" % (data['type'], doc_type) + self.reporter.error( + path=self.object_path, + code='doc-type-does-not-match-spec', + msg=msg + ) + else: + if doc_type is None: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " uses default type ('str') but documentation doesn't define type" + self.reporter.error( + path=self.object_path, + code='doc-missing-type', + msg=msg + ) + elif doc_type != 'str': + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " implies type as 'str' but documentation defines as %r" % doc_type + self.reporter.error( + path=self.object_path, + code='implied-parameter-type-mismatch', + msg=msg + ) + + doc_choices = [] + try: + for choice in doc_options_arg.get('choices', []): + try: + with CaptureStd(): + doc_choices.append(_type_checker(choice)) + except (Exception, SystemExit): + msg = "Argument '%s' in documentation" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines choices as (%r) but this is incompatible with argument type %r" % (choice, _type) + self.reporter.error( + path=self.object_path, + code='doc-choices-incompatible-type', + msg=msg + ) + raise StopIteration() + except StopIteration: + continue + + arg_choices = [] + try: + for choice in data.get('choices', []): + try: + with CaptureStd(): + arg_choices.append(_type_checker(choice)) + except (Exception, SystemExit): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines choices as (%r) but this is incompatible with argument type %r" % (choice, _type) + self.reporter.error( + path=self.object_path, + code='incompatible-choices', + msg=msg + ) + raise StopIteration() + except StopIteration: + continue + + if not compare_unordered_lists(arg_choices, doc_choices): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines choices as (%r) but documentation defines choices as (%r)" % (arg_choices, doc_choices) + self.reporter.error( + path=self.object_path, + code='doc-choices-do-not-match-spec', + msg=msg + ) + + doc_required = doc_options_arg.get('required', False) + data_required = data.get('required', False) + if (doc_required or data_required) and not (doc_required and data_required): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + if doc_required: + msg += " is not required, but is documented as being required" + else: + msg += " is required, but is not documented as being required" + self.reporter.error( + path=self.object_path, + code='doc-required-mismatch', + msg=msg + ) + + doc_elements = doc_options_arg.get('elements', None) + doc_type = doc_options_arg.get('type', 'str') + data_elements = data.get('elements', None) + if 
(doc_elements and not doc_type == 'list'):
+                msg = "Argument '%s'" % arg
+                if context:
+                    msg += " found in %s" % " -> ".join(context)
+                msg += " defines parameter elements as %s but it is valid only when value of parameter type is list" % doc_elements
+                self.reporter.error(
+                    path=self.object_path,
+                    code='doc-elements-invalid',
+                    msg=msg
+                )
+            if (doc_elements or data_elements) and not (doc_elements == data_elements):
+                msg = "Argument '%s' in argument_spec" % arg
+                if context:
+                    msg += " found in %s" % " -> ".join(context)
+                if data_elements:
+                    msg += " specifies elements as %s," % data_elements
+                else:
+                    msg += " does not specify elements,"
+                if doc_elements:
+                    msg += " but elements is documented as being %s" % doc_elements
+                else:
+                    msg += " but elements is not documented"
+                self.reporter.error(
+                    path=self.object_path,
+                    code='doc-elements-mismatch',
+                    msg=msg
+                )
+
+            spec_suboptions = data.get('options')
+            doc_suboptions = doc_options_arg.get('suboptions', {})
+            if spec_suboptions:
+                if not doc_suboptions:
+                    msg = "Argument '%s' in argument_spec" % arg
+                    if context:
+                        msg += " found in %s" % " -> ".join(context)
+                    msg += " has sub-options but documentation does not define it"
+                    self.reporter.error(
+                        path=self.object_path,
+                        code='missing-suboption-docs',
+                        msg=msg
+                    )
+                self._validate_argument_spec({'options': doc_suboptions}, spec_suboptions, kwargs,
+                                             context=context + [arg], last_context_spec=data)
+
+        for arg in args_from_argspec:
+            if not str(arg).isidentifier():
+                msg = "Argument '%s' in argument_spec" % arg
+                if context:
+                    msg += " found in %s" % " -> ".join(context)
+                msg += " is not a valid python identifier"
+                self.reporter.error(
+                    path=self.object_path,
+                    code='parameter-invalid',
+                    msg=msg
+                )
+
+        if docs:
+            args_from_docs = set()
+            for arg, data in doc_options.items():
+                args_from_docs.add(arg)
+                args_from_docs.update(data.get('aliases', []))
+
+            args_missing_from_docs = args_from_argspec.difference(args_from_docs)
+            docs_missing_from_args = args_from_docs.difference(args_from_argspec | deprecated_args_from_argspec)
+            for arg in args_missing_from_docs:
+                if arg in provider_args:
+                    # Provider args are being removed from network module top level
+                    # So they are likely not documented on purpose
+                    continue
+                msg = "Argument '%s'" % arg
+                if context:
+                    msg += " found in %s" % " -> ".join(context)
+                msg += " is listed in the argument_spec, but not documented in the module documentation"
+                self.reporter.error(
+                    path=self.object_path,
+                    code='undocumented-parameter',
+                    msg=msg
+                )
+            for arg in docs_missing_from_args:
+                msg = "Argument '%s'" % arg
+                if context:
+                    msg += " found in %s" % " -> ".join(context)
+                msg += " is listed in DOCUMENTATION.options, but not accepted by the module argument_spec"
+                self.reporter.error(
+                    path=self.object_path,
+                    code='nonexistent-parameter-documented',
+                    msg=msg
+                )
+
+    def _check_for_new_args(self, doc):
+        if not self.base_branch or self._is_new_module():
+            return
+
+        with CaptureStd():
+            try:
+                existing_doc, dummy_examples, dummy_return, existing_metadata = get_docstring(
+                    self.base_module, fragment_loader, verbose=True, collection_name=self.collection_name, is_module=True)
+                existing_options = existing_doc.get('options', {}) or {}
+            except AssertionError:
+                fragment = doc['extends_documentation_fragment']
+                self.reporter.warning(
+                    path=self.object_path,
+                    code='missing-existing-doc-fragment',
+                    msg='Pre-existing DOCUMENTATION fragment missing: %s' % fragment
+                )
+                return
+            except Exception as e:
+                self.reporter.warning_trace(
+                    path=self.object_path,
+                    tracebk=e
+                )
+                self.reporter.warning(
+                    path=self.object_path,
+                    code='unknown-doc-fragment',
+                    msg=('Unknown pre-existing DOCUMENTATION error, see TRACE. Submodule refs may need to be updated')
+                )
+                return
+
+        try:
+            mod_collection_name = existing_doc.get('version_added_collection')
+            mod_version_added = self._create_strict_version(
+                str(existing_doc.get('version_added', '0.0')),
+                collection_name=mod_collection_name)
+        except ValueError:
+            mod_collection_name = self.collection_name
+            mod_version_added = self._create_strict_version('0.0')
+
+        options = doc.get('options', {}) or {}
+
+        should_be = '.'.join(ansible_version.split('.')[:2])
+        strict_ansible_version = self._create_strict_version(should_be, collection_name='ansible.builtin')
+
+        for option, details in options.items():
+            try:
+                names = [option] + details.get('aliases', [])
+            except (TypeError, AttributeError):
+                # Reporting of this syntax error will be handled by schema validation.
+                continue
+
+            if any(name in existing_options for name in names):
+                # The option already existed. Make sure version_added didn't change.
+                for name in names:
+                    existing_collection_name = existing_options.get(name, {}).get('version_added_collection')
+                    existing_version = existing_options.get(name, {}).get('version_added')
+                    if existing_version:
+                        break
+                current_collection_name = details.get('version_added_collection')
+                current_version = details.get('version_added')
+                if current_collection_name != existing_collection_name:
+                    self.reporter.error(
+                        path=self.object_path,
+                        code='option-incorrect-version-added-collection',
+                        msg=('version_added for existing option (%s) should '
+                             'belong to collection %r. Currently belongs to %r' %
+                             (option, current_collection_name, existing_collection_name))
+                    )
+                elif str(current_version) != str(existing_version):
+                    self.reporter.error(
+                        path=self.object_path,
+                        code='option-incorrect-version-added',
+                        msg=('version_added for existing option (%s) should '
+                             'be %r. Currently %r' %
+                             (option, existing_version, current_version))
+                    )
+                continue
+
+            try:
+                collection_name = details.get('version_added_collection')
+                version_added = self._create_strict_version(
+                    str(details.get('version_added', '0.0')),
+                    collection_name=collection_name)
+            except ValueError as e:
+                # already reported during schema validation
+                continue
+
+            if collection_name != self.collection_name:
+                continue
+            if (strict_ansible_version != mod_version_added and
+                    (version_added < strict_ansible_version or
+                     strict_ansible_version < version_added)):
+                self.reporter.error(
+                    path=self.object_path,
+                    code='option-incorrect-version-added',
+                    msg=('version_added for new option (%s) should '
+                         'be %r. 
Currently %r' % + (option, should_be, version_added)) + ) + + return existing_doc + + @staticmethod + def is_blacklisted(path): + base_name = os.path.basename(path) + file_name = os.path.splitext(base_name)[0] + + if file_name.startswith('_') and os.path.islink(path): + return True + + if not frozenset((base_name, file_name)).isdisjoint(ModuleValidator.BLACKLIST): + return True + + for pat in ModuleValidator.BLACKLIST_PATTERNS: + if fnmatch(base_name, pat): + return True + + return False + + def validate(self): + super(ModuleValidator, self).validate() + if not self._python_module() and not self._powershell_module(): + self.reporter.error( + path=self.object_path, + code='invalid-extension', + msg=('Official Ansible modules must have a .py ' + 'extension for python modules or a .ps1 ' + 'for powershell modules') + ) + self._python_module_override = True + + if self._python_module() and self.ast is None: + self.reporter.error( + path=self.object_path, + code='python-syntax-error', + msg='Python SyntaxError while parsing module' + ) + try: + compile(self.text, self.path, 'exec') + except Exception: + self.reporter.trace( + path=self.object_path, + tracebk=traceback.format_exc() + ) + return + + end_of_deprecation_should_be_removed_only = False + if self._python_module(): + doc_info, docs = self._validate_docs() + + # See if current version => deprecated.removed_in, ie, should be docs only + if docs and docs.get('deprecated', False): + + if 'removed_in' in docs['deprecated']: + removed_in = None + collection_name = docs['deprecated'].get('removed_from_collection') + version = docs['deprecated']['removed_in'] + if collection_name != self.collection_name: + self.reporter.error( + path=self.object_path, + code='invalid-module-deprecation-source', + msg=('The deprecation version for a module must be added in this collection') + ) + else: + try: + removed_in = self._create_strict_version(str(version), collection_name=collection_name) + except ValueError as e: + self.reporter.error( + path=self.object_path, + code='invalid-module-deprecation-version', + msg=('The deprecation version %r cannot be parsed: %s' % (version, e)) + ) + + if removed_in: + if not self.collection: + strict_ansible_version = self._create_strict_version( + '.'.join(ansible_version.split('.')[:2]), self.collection_name) + end_of_deprecation_should_be_removed_only = strict_ansible_version >= removed_in + elif self.collection_version: + strict_ansible_version = self.collection_version + end_of_deprecation_should_be_removed_only = strict_ansible_version >= removed_in + + # handle deprecation by date + if 'removed_at_date' in docs['deprecated']: + try: + removed_at_date = docs['deprecated']['removed_at_date'] + if parse_isodate(removed_at_date, allow_date=True) < datetime.date.today(): + msg = "Module's deprecated.removed_at_date date '%s' is before today" % removed_at_date + self.reporter.error(path=self.object_path, code='deprecated-date', msg=msg) + except ValueError: + # This happens if the date cannot be parsed. This is already checked by the schema. 
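As a rough sketch of the version comparison used above (assuming _create_strict_version behaves like distutils' StrictVersion; the version values are hypothetical):

from distutils.version import StrictVersion

ansible_version = '2.10.7'         # hypothetical current version string
removed_in = StrictVersion('2.9')  # hypothetical deprecated.removed_in value

# Compare on major.minor only, as the validator does with should_be
strict_ansible_version = StrictVersion('.'.join(ansible_version.split('.')[:2]))

# True here, so the module is past its removal version and should be docs-only
end_of_deprecation_should_be_removed_only = strict_ansible_version >= removed_in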
+ pass + + if self._python_module() and not self._just_docs() and not end_of_deprecation_should_be_removed_only: + self._validate_ansible_module_call(docs) + self._check_for_sys_exit() + self._find_blacklist_imports() + main = self._find_main_call() + self._find_module_utils(main) + self._find_has_import() + first_callable = self._get_first_callable() + self._ensure_imports_below_docs(doc_info, first_callable) + self._check_for_subprocess() + self._check_for_os_call() + + if self._powershell_module(): + if self.basename in self.PS_DOC_BLACKLIST: + return + + self._validate_ps_replacers() + docs_path = self._find_ps_docs_py_file() + + # We can only validate PowerShell arg spec if it is using the new Ansible.Basic.AnsibleModule util + pattern = r'(?im)^#\s*ansiblerequires\s+\-csharputil\s*Ansible\.Basic' + if re.search(pattern, self.text) and self.object_name not in self.PS_ARG_VALIDATE_BLACKLIST: + with ModuleValidator(docs_path, base_branch=self.base_branch, git_cache=self.git_cache) as docs_mv: + docs = docs_mv._validate_docs()[1] + self._validate_ansible_module_call(docs) + + self._check_gpl3_header() + if not self._just_docs() and not end_of_deprecation_should_be_removed_only: + self._check_interpreter(powershell=self._powershell_module()) + self._check_type_instead_of_isinstance( + powershell=self._powershell_module() + ) + if end_of_deprecation_should_be_removed_only: + # Ensure that `if __name__ == '__main__':` calls `removed_module()` which ensure that the module has no code in + main = self._find_main_call('removed_module') + # FIXME: Ensure that the version in the call to removed_module is less than +2. + # Otherwise it's time to remove the file (This may need to be done in another test to + # avoid breaking whenever the Ansible version bumps) + + +class PythonPackageValidator(Validator): + BLACKLIST_FILES = frozenset(('__pycache__',)) + + def __init__(self, path, reporter=None): + super(PythonPackageValidator, self).__init__(reporter=reporter or Reporter()) + + self.path = path + self.basename = os.path.basename(path) + + @property + def object_name(self): + return self.basename + + @property + def object_path(self): + return self.path + + def validate(self): + super(PythonPackageValidator, self).validate() + + if self.basename in self.BLACKLIST_FILES: + return + + init_file = os.path.join(self.path, '__init__.py') + if not os.path.exists(init_file): + self.reporter.error( + path=self.object_path, + code='subdirectory-missing-init', + msg='Ansible module subdirectories must contain an __init__.py' + ) + + +def setup_collection_loader(): + collections_paths = os.environ.get('ANSIBLE_COLLECTIONS_PATH', '').split(os.pathsep) + _AnsibleCollectionFinder(collections_paths) + + +def re_compile(value): + """ + Argparse expects things to raise TypeError, re.compile raises an re.error + exception + + This function is a shorthand to convert the re.error exception to a + TypeError + """ + + try: + return re.compile(value) + except re.error as e: + raise TypeError(e) + + +def run(): + parser = argparse.ArgumentParser(prog="validate-modules") + parser.add_argument('modules', nargs='+', + help='Path to module or module directory') + parser.add_argument('-w', '--warnings', help='Show warnings', + action='store_true') + parser.add_argument('--exclude', help='RegEx exclusion pattern', + type=re_compile) + parser.add_argument('--arg-spec', help='Analyze module argument spec', + action='store_true', default=False) + parser.add_argument('--base-branch', default=None, + help='Used in determining if 
new options were added') + parser.add_argument('--format', choices=['json', 'plain'], default='plain', + help='Output format. Default: "%(default)s"') + parser.add_argument('--output', default='-', + help='Output location, use "-" for stdout. ' + 'Default "%(default)s"') + parser.add_argument('--collection', + help='Specifies the path to the collection, when ' + 'validating files within a collection. Ensure ' + 'that ANSIBLE_COLLECTIONS_PATH is set so the ' + 'contents of the collection can be located') + parser.add_argument('--collection-version', + help='The collection\'s version number used to check ' + 'deprecations') + + args = parser.parse_args() + + args.modules = [m.rstrip('/') for m in args.modules] + + reporter = Reporter() + git_cache = GitCache(args.base_branch) + + check_dirs = set() + + routing = None + if args.collection: + setup_collection_loader() + routing_file = 'meta/runtime.yml' + # Load meta/runtime.yml if it exists, as it may contain deprecation information + if os.path.isfile(routing_file): + try: + with open(routing_file) as f: + routing = yaml.safe_load(f) + except yaml.error.MarkedYAMLError as ex: + print('%s:%d:%d: YAML load failed: %s' % (routing_file, ex.context_mark.line + 1, ex.context_mark.column + 1, re.sub(r'\s+', ' ', str(ex)))) + except Exception as ex: # pylint: disable=broad-except + print('%s:%d:%d: YAML load failed: %s' % (routing_file, 0, 0, re.sub(r'\s+', ' ', str(ex)))) + + for module in args.modules: + if os.path.isfile(module): + path = module + if args.exclude and args.exclude.search(path): + continue + if ModuleValidator.is_blacklisted(path): + continue + with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version, + analyze_arg_spec=args.arg_spec, base_branch=args.base_branch, + git_cache=git_cache, reporter=reporter, routing=routing) as mv1: + mv1.validate() + check_dirs.add(os.path.dirname(path)) + + for root, dirs, files in os.walk(module): + basedir = root[len(module) + 1:].split('/', 1)[0] + if basedir in BLACKLIST_DIRS: + continue + for dirname in dirs: + if root == module and dirname in BLACKLIST_DIRS: + continue + path = os.path.join(root, dirname) + if args.exclude and args.exclude.search(path): + continue + check_dirs.add(path) + + for filename in files: + path = os.path.join(root, filename) + if args.exclude and args.exclude.search(path): + continue + if ModuleValidator.is_blacklisted(path): + continue + with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version, + analyze_arg_spec=args.arg_spec, base_branch=args.base_branch, + git_cache=git_cache, reporter=reporter, routing=routing) as mv2: + mv2.validate() + + if not args.collection: + for path in sorted(check_dirs): + pv = PythonPackageValidator(path, reporter=reporter) + pv.validate() + + if args.format == 'plain': + sys.exit(reporter.plain(warnings=args.warnings, output=args.output)) + else: + sys.exit(reporter.json(warnings=args.warnings, output=args.output)) + + +class GitCache: + def __init__(self, base_branch): + self.base_branch = base_branch + + if self.base_branch: + self.base_tree = self._git(['ls-tree', '-r', '--name-only', self.base_branch, 'lib/ansible/modules/']) + else: + self.base_tree = [] + + try: + self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', 'lib/ansible/modules/']) + except GitError as ex: + if ex.status == 128: + # fallback when there is no .git directory + self.head_tree = self._get_module_files() + else: + raise + except OSError as ex: + if ex.errno == 
errno.ENOENT: + # fallback when git is not installed + self.head_tree = self._get_module_files() + else: + raise + + self.base_module_paths = dict((os.path.basename(p), p) for p in self.base_tree if os.path.splitext(p)[1] in ('.py', '.ps1')) + + self.base_module_paths.pop('__init__.py', None) + + self.head_aliased_modules = set() + + for path in self.head_tree: + filename = os.path.basename(path) + + if filename.startswith('_') and filename != '__init__.py': + if os.path.islink(path): + self.head_aliased_modules.add(os.path.basename(os.path.realpath(path))) + + @staticmethod + def _get_module_files(): + module_files = [] + + for (dir_path, dir_names, file_names) in os.walk('lib/ansible/modules/'): + for file_name in file_names: + module_files.append(os.path.join(dir_path, file_name)) + + return module_files + + @staticmethod + def _git(args): + cmd = ['git'] + args + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout, stderr = p.communicate() + if p.returncode != 0: + raise GitError(stderr, p.returncode) + return stdout.decode('utf-8').splitlines() + + +class GitError(Exception): + def __init__(self, message, status): + super(GitError, self).__init__(message) + + self.status = status + + +def main(): + try: + run() + except KeyboardInterrupt: + pass diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py new file mode 100644 index 00000000..ac025291 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py @@ -0,0 +1,170 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2016 Matt Martz <matt@sivel.net> +# Copyright (C) 2016 Rackspace US, Inc. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. 
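The module_args.py file that follows recovers a module's argument_spec by temporarily swapping out AnsibleModule.__init__ for a fake callable that records its arguments and aborts execution. A minimal sketch of that interception pattern, using a stand-in class rather than the real AnsibleModule:

class _RecordingInit:
    def __init__(self):
        self.kwargs = None
        self.called = False

    def __call__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs
        self.called = True
        raise RuntimeError('stop before the module does any real work')


class StandInModule:  # stand-in for AnsibleModule
    def __init__(self, argument_spec=None):
        pass


fake = _RecordingInit()
original_init = StandInModule.__init__
StandInModule.__init__ = fake
try:
    try:
        StandInModule(argument_spec={'data': {'type': 'str', 'default': 'pong'}})
    except RuntimeError:
        pass
finally:
    StandInModule.__init__ = original_init  # always restore the real __init__

assert fake.called
print(fake.kwargs['argument_spec'])  # {'data': {'type': 'str', 'default': 'pong'}}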
+from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import runpy +import json +import os +import subprocess +import sys + +from contextlib import contextmanager + +from ansible.executor.powershell.module_manifest import PSModuleDepFinder +from ansible.module_utils.basic import FILE_COMMON_ARGUMENTS +from ansible.module_utils.six import reraise +from ansible.module_utils._text import to_bytes, to_text + +from .utils import CaptureStd, find_executable, get_module_name_from_filename + + +class AnsibleModuleCallError(RuntimeError): + pass + + +class AnsibleModuleImportError(ImportError): + pass + + +class AnsibleModuleNotInitialized(Exception): + pass + + +class _FakeAnsibleModuleInit: + def __init__(self): + self.args = tuple() + self.kwargs = {} + self.called = False + + def __call__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + self.called = True + raise AnsibleModuleCallError('AnsibleModuleCallError') + + +def _fake_load_params(): + pass + + +@contextmanager +def setup_env(filename): + # Used to clean up imports later + pre_sys_modules = list(sys.modules.keys()) + + fake = _FakeAnsibleModuleInit() + module = __import__('ansible.module_utils.basic').module_utils.basic + _original_init = module.AnsibleModule.__init__ + _original_load_params = module._load_params + setattr(module.AnsibleModule, '__init__', fake) + setattr(module, '_load_params', _fake_load_params) + + try: + yield fake + finally: + setattr(module.AnsibleModule, '__init__', _original_init) + setattr(module, '_load_params', _original_load_params) + + # Clean up imports to prevent issues with mutable data being used in modules + for k in list(sys.modules.keys()): + # It's faster if we limit to items in ansible.module_utils + # But if this causes problems later, we should remove it + if k not in pre_sys_modules and k.startswith('ansible.module_utils.'): + del sys.modules[k] + + +def get_ps_argument_spec(filename, collection): + fqc_name = get_module_name_from_filename(filename, collection) + + pwsh = find_executable('pwsh') + if not pwsh: + raise FileNotFoundError('Required program for PowerShell arg spec inspection "pwsh" not found.') + + module_path = os.path.join(os.getcwd(), filename) + b_module_path = to_bytes(module_path, errors='surrogate_or_strict') + with open(b_module_path, mode='rb') as module_fd: + b_module_data = module_fd.read() + + ps_dep_finder = PSModuleDepFinder() + ps_dep_finder.scan_module(b_module_data, fqn=fqc_name) + + # For ps_argspec.ps1 to compile Ansible.Basic it also needs the AddType module_util. 
+    ps_dep_finder._add_module((b"Ansible.ModuleUtils.AddType", ".psm1", None), wrapper=False)
+
+    util_manifest = json.dumps({
+        'module_path': to_text(module_path, errors='surrogate_or_strict'),
+        'ansible_basic': ps_dep_finder.cs_utils_module["Ansible.Basic"]['path'],
+        'ps_utils': dict([(name, info['path']) for name, info in ps_dep_finder.ps_modules.items()]),
+    })
+
+    script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ps_argspec.ps1')
+    proc = subprocess.Popen([script_path, util_manifest], stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+                            shell=False)
+    stdout, stderr = proc.communicate()
+
+    if proc.returncode != 0:
+        raise AnsibleModuleImportError("STDOUT:\n%s\nSTDERR:\n%s" % (stdout.decode('utf-8'), stderr.decode('utf-8')))
+
+    kwargs = json.loads(stdout)
+
+    # the validate-modules code expects the options spec to be under the argument_spec key not options as set in PS
+    kwargs['argument_spec'] = kwargs.pop('options', {})
+
+    return kwargs['argument_spec'], (), kwargs
+
+
+def get_py_argument_spec(filename, collection):
+    name = get_module_name_from_filename(filename, collection)
+
+    with setup_env(filename) as fake:
+        try:
+            with CaptureStd():
+                runpy.run_module(name, run_name='__main__', alter_sys=True)
+        except AnsibleModuleCallError:
+            pass
+        except BaseException as e:
+            # we want to catch all exceptions here, including sys.exit
+            reraise(AnsibleModuleImportError, AnsibleModuleImportError('%s' % e), sys.exc_info()[2])
+
+    if not fake.called:
+        raise AnsibleModuleNotInitialized()
+
+    try:
+        # for ping kwargs == {'argument_spec':{'data':{'type':'str','default':'pong'}}, 'supports_check_mode':True}
+        if 'argument_spec' in fake.kwargs:
+            argument_spec = fake.kwargs['argument_spec']
+        else:
+            argument_spec = fake.args[0]
+        # If add_file_common_args is truish, add options from FILE_COMMON_ARGUMENTS when not present.
+        # This is the only modification to argument_spec done by AnsibleModule itself, and which is
+        # not caught by setup_env's AnsibleModule replacement
+        if fake.kwargs.get('add_file_common_args'):
+            for k, v in FILE_COMMON_ARGUMENTS.items():
+                if k not in argument_spec:
+                    argument_spec[k] = v
+        return argument_spec, fake.args, fake.kwargs
+    except (TypeError, IndexError):
+        return {}, (), {}
+
+
+def get_argument_spec(filename, collection):
+    if filename.endswith('.py'):
+        return get_py_argument_spec(filename, collection)
+    else:
+        return get_ps_argument_spec(filename, collection)
diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/ps_argspec.ps1 b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/ps_argspec.ps1
new file mode 100755
index 00000000..5ceb9d50
--- /dev/null
+++ b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/ps_argspec.ps1
@@ -0,0 +1,110 @@
+#!/usr/bin/env pwsh
+#Requires -Version 6
+
+Set-StrictMode -Version 2.0
+$ErrorActionPreference = "Stop"
+$WarningPreference = "Stop"
+
+Function Resolve-CircularReference {
+    <#
+    .SYNOPSIS
+    Removes known types that cause a circular reference in their json serialization.
+
+    .PARAMETER Hash
+    The hash to scan for circular references
+    #>
+    [CmdletBinding()]
+    param (
+        [Parameter(Mandatory=$true)]
+        [System.Collections.IDictionary]
+        $Hash
+    )
+
+    foreach ($key in [String[]]$Hash.Keys) {
+        $value = $Hash[$key]
+        if ($value -is [System.Collections.IDictionary]) {
+            Resolve-CircularReference -Hash $value
+        } elseif ($value -is [Array] -or $value -is [System.Collections.IList]) {
+            $values = @(foreach ($v in $value) {
+                if ($v -is [System.Collections.IDictionary]) {
+                    Resolve-CircularReference -Hash $v
+                }
+                ,$v
+            })
+            $Hash[$key] = $values
+        } elseif ($value -is [DateTime]) {
+            $Hash[$key] = $value.ToString("yyyy-MM-dd")
+        } elseif ($value -is [delegate]) {
+            # Type can be set to a delegate function which defines its own type. For the documentation we just
+            # reflect that as raw
+            if ($key -eq 'type') {
+                $Hash[$key] = 'raw'
+            } else {
+                $Hash[$key] = $value.ToString()  # Shouldn't ever happen but just in case.
+            }
+        }
+    }
+}
+
+$manifest = ConvertFrom-Json -InputObject $args[0] -AsHashtable
+if (-not $manifest.Contains('module_path') -or -not $manifest.module_path) {
+    Write-Error -Message "No module specified."
+    exit 1
+}
+$module_path = $manifest.module_path
+
+# Check if the path is relative and get the full path to the module
+if (-not ([System.IO.Path]::IsPathRooted($module_path))) {
+    $module_path = $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath($module_path)
+}
+
+if (-not (Test-Path -LiteralPath $module_path -PathType Leaf)) {
+    Write-Error -Message "The module at '$module_path' does not exist."
+    exit 1
+}
+
+$module_code = Get-Content -LiteralPath $module_path -Raw
+
+$powershell = [PowerShell]::Create()
+$powershell.Runspace.SessionStateProxy.SetVariable("ErrorActionPreference", "Stop")
+
+# Load the PowerShell module utils as the module may be using them to refer to shared module options. Currently we
+# can only load the PowerShell utils due to cross platform compatibility issues.
+if ($manifest.Contains('ps_utils')) {
+    foreach ($util_info in $manifest.ps_utils.GetEnumerator()) {
+        $util_name = $util_info.Key
+        $util_path = $util_info.Value
+
+        if (-not (Test-Path -LiteralPath $util_path -PathType Leaf)) {
+            # Failed to find the util path, just silently ignore for now and hope for the best.
+            continue
+        }
+
+        $util_sb = [ScriptBlock]::Create((Get-Content -LiteralPath $util_path -Raw))
+        $powershell.AddCommand('New-Module').AddParameters(@{
+            Name = $util_name
+            ScriptBlock = $util_sb
+        }) > $null
+        $powershell.AddCommand('Import-Module').AddParameter('WarningAction', 'SilentlyContinue') > $null
+        $powershell.AddCommand('Out-Null').AddStatement() > $null
+
+        # Also import it into the current runspace in case ps_argspec.ps1 needs to use it.
+        $null = New-Module -Name $util_name -ScriptBlock $util_sb | Import-Module -WarningAction SilentlyContinue
+    }
+}
+
+Add-CSharpType -References @(Get-Content -LiteralPath $manifest.ansible_basic -Raw)
+[Ansible.Basic.AnsibleModule]::_DebugArgSpec = $true
+
+$powershell.AddScript($module_code) > $null
+$powershell.Invoke() > $null
+
+if ($powershell.HadErrors) {
+    $powershell.Streams.Error
+    exit 1
+}
+
+$arg_spec = $powershell.Runspace.SessionStateProxy.GetVariable('ansibleTestArgSpec')
+Resolve-CircularReference -Hash $arg_spec
+
+ConvertTo-Json -InputObject $arg_spec -Compress -Depth 99
diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/schema.py b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/schema.py
new file mode 100644
index 00000000..42a2ada4
--- /dev/null
+++ b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/schema.py
@@ -0,0 +1,488 @@
+# -*- coding: utf-8 -*-
+
+# Copyright: (c) 2015, Matt Martz <matt@sivel.net>
+# Copyright: (c) 2015, Rackspace US, Inc.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import re
+
+from distutils.version import StrictVersion
+from functools import partial
+
+from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, Required, Schema, Self, ValueInvalid
+from ansible.module_utils.six import string_types
+from ansible.module_utils.common.collections import is_iterable
+from ansible.utils.version import SemanticVersion
+
+from .utils import parse_isodate
+
+list_string_types = list(string_types)
+tuple_string_types = tuple(string_types)
+any_string_types = Any(*string_types)
+
+# Valid DOCUMENTATION.author lines
+# Based on Ansibulbot's extract_github_id()
+# author: First Last (@name) [optional anything]
+# "Ansible Core Team" - Used by the Bot
+# "Michael DeHaan" - nop
+# "OpenStack Ansible SIG" - OpenStack does not use GitHub
+# "Name (!UNKNOWN)" - For the few untraceable authors
+author_line = re.compile(r'^\w.*(\(@([\w-]+)\)|!UNKNOWN)(?![\w.])|^Ansible Core Team$|^Michael DeHaan$|^OpenStack Ansible SIG$')
+
+
+def _add_ansible_error_code(exception, error_code):
+    setattr(exception, 'ansible_error_code', error_code)
+    return exception
+
+
+def isodate(v, error_code=None):
+    try:
+        parse_isodate(v, allow_date=True)
+    except ValueError as e:
+        raise _add_ansible_error_code(Invalid(str(e)), error_code or 'ansible-invalid-date')
+    return v
+
+
+COLLECTION_NAME_RE = re.compile(r'^([^.]+\.[^.]+)$')
+
+
+def collection_name(v, error_code=None):
+    if not isinstance(v, string_types):
+        raise _add_ansible_error_code(
+            Invalid('Collection name must be a string'), error_code or 'collection-invalid-name')
+    m = COLLECTION_NAME_RE.match(v)
+    if not m:
+        raise _add_ansible_error_code(
+            Invalid('Collection name must be of format `<namespace>.<name>`'), error_code or 'collection-invalid-name')
+    return v
+
+
+def version(for_collection=False):
+    if for_collection:
+        # We do not accept floats for versions in collections
+        return Any(*string_types)
+    return Any(float, *string_types)
+
+
+def date(error_code=None):
+    return Any(isodate, error_code=error_code)
+
+
+def is_callable(v):
+    if not callable(v):
+        raise ValueInvalid('not a valid value')
+    return v
+
+
+def sequence_of_sequences(min=None, max=None):
+    return All(
+        Any(
+            None,
+            [Any(list, tuple)],
+            tuple([Any(list, tuple)]),
+        ),
+        Any(
+            None,
+            [Length(min=min, max=max)],
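+            # the tuple form below gets the same length bounds as the list form above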
+ tuple([Length(min=min, max=max)]), + ), + ) + + +seealso_schema = Schema( + [ + Any( + { + Required('module'): Any(*string_types), + 'description': Any(*string_types), + }, + { + Required('ref'): Any(*string_types), + Required('description'): Any(*string_types), + }, + { + Required('name'): Any(*string_types), + Required('link'): Any(*string_types), + Required('description'): Any(*string_types), + }, + ), + ] +) + + +argument_spec_types = ['bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', + 'sid', 'str'] + + +argument_spec_modifiers = { + 'mutually_exclusive': sequence_of_sequences(min=2), + 'required_together': sequence_of_sequences(min=2), + 'required_one_of': sequence_of_sequences(min=2), + 'required_if': sequence_of_sequences(min=3, max=4), + 'required_by': Schema({str: Any(list_string_types, tuple_string_types, *string_types)}), +} + + +def no_required_with_default(v): + if v.get('default') and v.get('required'): + raise Invalid('required=True cannot be supplied with a default') + return v + + +def elements_with_list(v): + if v.get('elements') and v.get('type') != 'list': + raise Invalid('type must be list to use elements') + return v + + +def options_with_apply_defaults(v): + if v.get('apply_defaults') and not v.get('options'): + raise Invalid('apply_defaults=True requires options to be set') + return v + + +def option_deprecation(v): + if v.get('removed_in_version') or v.get('removed_at_date'): + if v.get('removed_in_version') and v.get('removed_at_date'): + raise _add_ansible_error_code( + Invalid('Only one of removed_in_version and removed_at_date must be specified'), + error_code='deprecation-either-date-or-version') + if not v.get('removed_from_collection'): + raise _add_ansible_error_code( + Invalid('If removed_in_version or removed_at_date is specified, ' + 'removed_from_collection must be specified as well'), + error_code='deprecation-collection-missing') + return + if v.get('removed_from_collection'): + raise Invalid('removed_from_collection cannot be specified without either ' + 'removed_in_version or removed_at_date') + + +def argument_spec_schema(for_collection): + any_string_types = Any(*string_types) + schema = { + any_string_types: { + 'type': Any(is_callable, *argument_spec_types), + 'elements': Any(*argument_spec_types), + 'default': object, + 'fallback': Any( + (is_callable, list_string_types), + [is_callable, list_string_types], + ), + 'choices': Any([object], (object,)), + 'required': bool, + 'no_log': bool, + 'aliases': Any(list_string_types, tuple(list_string_types)), + 'apply_defaults': bool, + 'removed_in_version': version(for_collection), + 'removed_at_date': date(), + 'removed_from_collection': collection_name, + 'options': Self, + 'deprecated_aliases': Any([Any( + { + Required('name'): Any(*string_types), + Required('date'): date(), + Required('collection_name'): collection_name, + }, + { + Required('name'): Any(*string_types), + Required('version'): version(for_collection), + Required('collection_name'): collection_name, + }, + )]), + } + } + schema[any_string_types].update(argument_spec_modifiers) + schemas = All( + schema, + Schema({any_string_types: no_required_with_default}), + Schema({any_string_types: elements_with_list}), + Schema({any_string_types: options_with_apply_defaults}), + Schema({any_string_types: option_deprecation}), + ) + return Schema(schemas) + + +def ansible_module_kwargs_schema(for_collection): + schema = { + 'argument_spec': argument_spec_schema(for_collection), + 'bypass_checks': bool, + 
'no_log': bool, + 'check_invalid_arguments': Any(None, bool), + 'add_file_common_args': bool, + 'supports_check_mode': bool, + } + schema.update(argument_spec_modifiers) + return Schema(schema) + + +json_value = Schema(Any( + None, + int, + float, + [Self], + *(list({str_type: Self} for str_type in string_types) + list(string_types)) +)) + + +def version_added(v, error_code='version-added-invalid', accept_historical=False): + if 'version_added' in v: + version_added = v.get('version_added') + if isinstance(version_added, string_types): + # If it is not a string, schema validation will have already complained + # - or we have a float and we are in ansible/ansible, in which case we're + # also happy. + if v.get('version_added_collection') == 'ansible.builtin': + if version_added == 'historical' and accept_historical: + return v + try: + version = StrictVersion() + version.parse(version_added) + except ValueError as exc: + raise _add_ansible_error_code( + Invalid('version_added (%r) is not a valid ansible-base version: ' + '%s' % (version_added, exc)), + error_code=error_code) + else: + try: + version = SemanticVersion() + version.parse(version_added) + except ValueError as exc: + raise _add_ansible_error_code( + Invalid('version_added (%r) is not a valid collection version ' + '(see specification at https://semver.org/): ' + '%s' % (version_added, exc)), + error_code=error_code) + elif 'version_added_collection' in v: + # Must have been manual intervention, since version_added_collection is only + # added automatically when version_added is present + raise Invalid('version_added_collection cannot be specified without version_added') + return v + + +def list_dict_option_schema(for_collection): + suboption_schema = Schema( + { + Required('description'): Any(list_string_types, *string_types), + 'required': bool, + 'choices': list, + 'aliases': Any(list_string_types), + 'version_added': version(for_collection), + 'version_added_collection': collection_name, + 'default': json_value, + # Note: Types are strings, not literal bools, such as True or False + 'type': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), + # in case of type='list' elements define type of individual item in list + 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), + # Recursive suboptions + 'suboptions': Any(None, *list({str_type: Self} for str_type in string_types)), + }, + extra=PREVENT_EXTRA + ) + + # This generates list of dicts with keys from string_types and suboption_schema value + # for example in Python 3: {str: suboption_schema} + list_dict_suboption_schema = [{str_type: suboption_schema} for str_type in string_types] + + option_schema = Schema( + { + Required('description'): Any(list_string_types, *string_types), + 'required': bool, + 'choices': list, + 'aliases': Any(list_string_types), + 'version_added': version(for_collection), + 'version_added_collection': collection_name, + 'default': json_value, + 'suboptions': Any(None, *list_dict_suboption_schema), + # Note: Types are strings, not literal bools, such as True or False + 'type': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), + # in case of type='list' elements define type of individual item in list + 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), + }, + 
extra=PREVENT_EXTRA + ) + + option_version_added = Schema( + All({ + 'suboptions': Any(None, *[{str_type: Self} for str_type in string_types]), + }, partial(version_added, error_code='option-invalid-version-added')), + extra=ALLOW_EXTRA + ) + + # This generates list of dicts with keys from string_types and option_schema value + # for example in Python 3: {str: option_schema} + return [{str_type: All(option_schema, option_version_added)} for str_type in string_types] + + +def return_contains(v): + schema = Schema( + { + Required('contains'): Any(dict, list, *string_types) + }, + extra=ALLOW_EXTRA + ) + if v.get('type') == 'complex': + return schema(v) + return v + + +def return_schema(for_collection): + return_contains_schema = Any( + All( + Schema( + { + Required('description'): Any(list_string_types, *string_types), + 'returned': Any(*string_types), # only returned on top level + Required('type'): Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str'), + 'version_added': version(for_collection), + 'version_added_collection': collection_name, + 'sample': json_value, + 'example': json_value, + 'contains': Any(None, *list({str_type: Self} for str_type in string_types)), + # in case of type='list' elements define type of individual item in list + 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), + } + ), + Schema(return_contains), + Schema(partial(version_added, error_code='option-invalid-version-added')), + ), + Schema(type(None)), + ) + + # This generates list of dicts with keys from string_types and return_contains_schema value + # for example in Python 3: {str: return_contains_schema} + list_dict_return_contains_schema = [{str_type: return_contains_schema} for str_type in string_types] + + return Any( + All( + Schema( + { + any_string_types: { + Required('description'): Any(list_string_types, *string_types), + Required('returned'): Any(*string_types), + Required('type'): Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str'), + 'version_added': version(for_collection), + 'version_added_collection': collection_name, + 'sample': json_value, + 'example': json_value, + 'contains': Any(None, *list_dict_return_contains_schema), + # in case of type='list' elements define type of individual item in list + 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), + } + } + ), + Schema({any_string_types: return_contains}), + Schema({any_string_types: partial(version_added, error_code='option-invalid-version-added')}), + ), + Schema(type(None)), + ) + + +def deprecation_schema(for_collection): + main_fields = { + Required('why'): Any(*string_types), + Required('alternative'): Any(*string_types), + Required('removed_from_collection'): collection_name, + 'removed': Any(True), + } + + date_schema = { + Required('removed_at_date'): date(), + } + date_schema.update(main_fields) + + if for_collection: + version_schema = { + Required('removed_in'): version(for_collection), + } + else: + version_schema = { + # Only list branches that are deprecated or may have docs stubs in + # Deprecation cycle changed at 2.4 (though not retroactively) + # 2.3 -> removed_in: "2.5" + n for docs stub + # 2.4 -> removed_in: "2.8" + n for docs stub + Required('removed_in'): Any( + "2.2", "2.3", "2.4", "2.5", "2.6", "2.8", "2.9", "2.10", "2.11", "2.12", "2.13", "2.14"), + } + version_schema.update(main_fields) + + return Any( + Schema(version_schema, 
extra=PREVENT_EXTRA), + Schema(date_schema, extra=PREVENT_EXTRA), + ) + + +def author(value): + if value is None: + return value # let schema checks handle + + if not is_iterable(value): + value = [value] + + for line in value: + if not isinstance(line, string_types): + continue # let schema checks handle + m = author_line.search(line) + if not m: + raise Invalid("Invalid author") + + return value + + +def doc_schema(module_name, for_collection=False, deprecated_module=False): + + if module_name.startswith('_'): + module_name = module_name[1:] + deprecated_module = True + doc_schema_dict = { + Required('module'): module_name, + Required('short_description'): Any(*string_types), + Required('description'): Any(list_string_types, *string_types), + Required('author'): All(Any(None, list_string_types, *string_types), author), + 'notes': Any(None, list_string_types), + 'seealso': Any(None, seealso_schema), + 'requirements': list_string_types, + 'todo': Any(None, list_string_types, *string_types), + 'options': Any(None, *list_dict_option_schema(for_collection)), + 'extends_documentation_fragment': Any(list_string_types, *string_types), + 'version_added_collection': collection_name, + } + + if for_collection: + # Optional + doc_schema_dict['version_added'] = version(for_collection=True) + else: + doc_schema_dict[Required('version_added')] = version(for_collection=False) + + if deprecated_module: + deprecation_required_scheme = { + Required('deprecated'): Any(deprecation_schema(for_collection=for_collection)), + } + + doc_schema_dict.update(deprecation_required_scheme) + return Schema( + All( + Schema( + doc_schema_dict, + extra=PREVENT_EXTRA + ), + partial(version_added, error_code='module-invalid-version-added', accept_historical=not for_collection), + ) + ) + + +# Things to add soon +#################### +# 1) Recursively validate `type: complex` fields +# This will improve documentation, though require fair amount of module tidyup + +# Possible Future Enhancements +############################## + +# 1) Don't allow empty options for choices, aliases, etc +# 2) If type: bool ensure choices isn't set - perhaps use Exclusive +# 3) both version_added should be quoted floats + +# Tool that takes JSON and generates RETURN skeleton (needs to support complex structures) diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/utils.py b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/utils.py new file mode 100644 index 00000000..939ae651 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/utils.py @@ -0,0 +1,218 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2015 Matt Martz <matt@sivel.net> +# Copyright (C) 2015 Rackspace US, Inc. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. 
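+
+# Shared helpers for the validate-modules checks: stdout/stderr capture, AST
+# inspection, YAML parsing that preserves line numbers, and ISO 8601 date parsing.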
+from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import ast +import datetime +import os +import re +import sys + +from io import BytesIO, TextIOWrapper + +import yaml +import yaml.reader + +from ansible.module_utils._text import to_text +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.six import string_types + + +class AnsibleTextIOWrapper(TextIOWrapper): + def write(self, s): + super(AnsibleTextIOWrapper, self).write(to_text(s, self.encoding, errors='replace')) + + +def find_executable(executable, cwd=None, path=None): + """Finds the full path to the executable specified""" + match = None + real_cwd = os.getcwd() + + if not cwd: + cwd = real_cwd + + if os.path.dirname(executable): + target = os.path.join(cwd, executable) + if os.path.exists(target) and os.access(target, os.F_OK | os.X_OK): + match = executable + else: + path = os.environ.get('PATH', os.path.defpath) + + path_dirs = path.split(os.path.pathsep) + seen_dirs = set() + + for path_dir in path_dirs: + if path_dir in seen_dirs: + continue + + seen_dirs.add(path_dir) + + if os.path.abspath(path_dir) == real_cwd: + path_dir = cwd + + candidate = os.path.join(path_dir, executable) + + if os.path.exists(candidate) and os.access(candidate, os.F_OK | os.X_OK): + match = candidate + break + + return match + + +def find_globals(g, tree): + """Uses AST to find globals in an ast tree""" + for child in tree: + if hasattr(child, 'body') and isinstance(child.body, list): + find_globals(g, child.body) + elif isinstance(child, (ast.FunctionDef, ast.ClassDef)): + g.add(child.name) + continue + elif isinstance(child, ast.Assign): + try: + g.add(child.targets[0].id) + except (IndexError, AttributeError): + pass + elif isinstance(child, ast.Import): + g.add(child.names[0].name) + elif isinstance(child, ast.ImportFrom): + for name in child.names: + g_name = name.asname or name.name + if g_name == '*': + continue + g.add(g_name) + + +class CaptureStd(): + """Context manager to handle capturing stderr and stdout""" + + def __enter__(self): + self.sys_stdout = sys.stdout + self.sys_stderr = sys.stderr + sys.stdout = self.stdout = AnsibleTextIOWrapper(BytesIO(), encoding=self.sys_stdout.encoding) + sys.stderr = self.stderr = AnsibleTextIOWrapper(BytesIO(), encoding=self.sys_stderr.encoding) + return self + + def __exit__(self, exc_type, exc_value, traceback): + sys.stdout = self.sys_stdout + sys.stderr = self.sys_stderr + + def get(self): + """Return ``(stdout, stderr)``""" + + return self.stdout.buffer.getvalue(), self.stderr.buffer.getvalue() + + +def get_module_name_from_filename(filename, collection): + # Calculate the module's name so that relative imports work correctly + if collection: + # collection is a relative path, example: ansible_collections/my_namespace/my_collection + # filename is a relative path, example: plugins/modules/my_module.py + path = os.path.join(collection, filename) + else: + # filename is a relative path, example: lib/ansible/modules/system/ping.py + path = os.path.relpath(filename, 'lib') + + name = os.path.splitext(path)[0].replace(os.path.sep, '.') + + return name + + +def parse_yaml(value, lineno, module, name, load_all=False): + traces = [] + errors = [] + data = None + + if load_all: + loader = yaml.safe_load_all + else: + loader = yaml.safe_load + + try: + data = loader(value) + if load_all: + data = list(data) + except yaml.MarkedYAMLError as e: + e.problem_mark.line += lineno - 1 + e.problem_mark.name = '%s.%s' % (module, name) + 
        errors.append({
+            'msg': '%s is not valid YAML' % name,
+            'line': e.problem_mark.line + 1,
+            'column': e.problem_mark.column + 1
+        })
+        traces.append(e)
+    except yaml.reader.ReaderError as e:
+        traces.append(e)
+        # TODO: Better line/column detection
+        errors.append({
+            'msg': ('%s is not valid YAML. Character '
+                    '0x%x at position %d.' % (name, e.character, e.position)),
+            'line': lineno
+        })
+    except yaml.YAMLError as e:
+        traces.append(e)
+        errors.append({
+            'msg': '%s is not valid YAML: %s: %s' % (name, type(e), e),
+            'line': lineno
+        })
+
+    return data, errors, traces
+
+
+def is_empty(value):
+    """Evaluate null-like values, excluding False"""
+    if value is False:
+        return False
+    return not bool(value)
+
+
+def compare_unordered_lists(a, b):
+    """Safe list comparisons
+
+    Supports:
+      - unordered lists
+      - unhashable elements
+    """
+    return len(a) == len(b) and all(x in b for x in a)
+
+
+class NoArgsAnsibleModule(AnsibleModule):
+    """AnsibleModule that does not actually load params. This is used to get access to the
+    methods within AnsibleModule without having to fake a bunch of data
+    """
+    def _load_params(self):
+        self.params = {'_ansible_selinux_special_fs': [], '_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False, '_ansible_check_mode': False}
+
+
+def parse_isodate(v, allow_date):
+    if allow_date:
+        if isinstance(v, datetime.date):
+            return v
+        msg = 'Expected ISO 8601 date string (YYYY-MM-DD) or YAML date'
+    else:
+        msg = 'Expected ISO 8601 date string (YYYY-MM-DD)'
+    if not isinstance(v, string_types):
+        raise ValueError(msg)
+    # From Python 3.7 on, there is datetime.date.fromisoformat(). For older versions,
+    # we have to do things manually.
+    if not re.match('^[0-9]{4}-[0-9]{2}-[0-9]{2}$', v):
+        raise ValueError(msg)
+    try:
+        return datetime.datetime.strptime(v, '%Y-%m-%d').date()
+    except ValueError:
+        raise ValueError(msg)
diff --git a/test/lib/ansible_test/_data/sanity/yamllint/config/default.yml b/test/lib/ansible_test/_data/sanity/yamllint/config/default.yml
new file mode 100644
index 00000000..45d8b7ad
--- /dev/null
+++ b/test/lib/ansible_test/_data/sanity/yamllint/config/default.yml
@@ -0,0 +1,19 @@
+extends: default
+
+rules:
+  braces: {max-spaces-inside: 1, level: error}
+  brackets: {max-spaces-inside: 1, level: error}
+  colons: {max-spaces-after: -1, level: error}
+  commas: {max-spaces-after: -1, level: error}
+  comments: disable
+  comments-indentation: disable
+  document-start: disable
+  empty-lines: {max: 3, level: error}
+  hyphens: {level: error}
+  indentation: disable
+  key-duplicates: enable
+  line-length: disable
+  new-line-at-end-of-file: disable
+  new-lines: {type: unix}
+  trailing-spaces: disable
+  truthy: disable
diff --git a/test/lib/ansible_test/_data/sanity/yamllint/config/modules.yml b/test/lib/ansible_test/_data/sanity/yamllint/config/modules.yml
new file mode 100644
index 00000000..da7e6049
--- /dev/null
+++ b/test/lib/ansible_test/_data/sanity/yamllint/config/modules.yml
@@ -0,0 +1,19 @@
+extends: default
+
+rules:
+  braces: disable
+  brackets: disable
+  colons: disable
+  commas: disable
+  comments: disable
+  comments-indentation: disable
+  document-start: disable
+  empty-lines: disable
+  hyphens: disable
+  indentation: disable
+  key-duplicates: enable
+  line-length: disable
+  new-line-at-end-of-file: disable
+  new-lines: {type: unix}
+  trailing-spaces: disable
+  truthy: disable
diff --git a/test/lib/ansible_test/_data/sanity/yamllint/config/plugins.yml b/test/lib/ansible_test/_data/sanity/yamllint/config/plugins.yml
new file mode
100644 index 00000000..6d418137 --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/yamllint/config/plugins.yml @@ -0,0 +1,19 @@ +extends: default + +rules: + braces: disable + brackets: disable + colons: disable + commas: disable + comments: disable + comments-indentation: disable + document-start: disable + empty-lines: disable + hyphens: disable + indentation: disable + key-duplicates: disable + line-length: disable + new-line-at-end-of-file: disable + new-lines: {type: unix} + trailing-spaces: disable + truthy: disable diff --git a/test/lib/ansible_test/_data/sanity/yamllint/yamllinter.py b/test/lib/ansible_test/_data/sanity/yamllint/yamllinter.py new file mode 100644 index 00000000..c9cdc19c --- /dev/null +++ b/test/lib/ansible_test/_data/sanity/yamllint/yamllinter.py @@ -0,0 +1,249 @@ +#!/usr/bin/env python +"""Wrapper around yamllint that supports YAML embedded in Ansible modules.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import ast +import json +import os +import sys + +import yaml +from yaml.resolver import Resolver +from yaml.constructor import SafeConstructor +from yaml.error import MarkedYAMLError +from _yaml import CParser # pylint: disable=no-name-in-module + +from yamllint import linter +from yamllint.config import YamlLintConfig + + +def main(): + """Main program body.""" + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + checker = YamlChecker() + checker.check(paths) + checker.report() + + +class TestConstructor(SafeConstructor): + """Yaml Safe Constructor that knows about Ansible tags""" + + +TestConstructor.add_constructor( + u'!unsafe', + TestConstructor.construct_yaml_str) + + +TestConstructor.add_constructor( + u'!vault', + TestConstructor.construct_yaml_str) + + +TestConstructor.add_constructor( + u'!vault-encrypted', + TestConstructor.construct_yaml_str) + + +class TestLoader(CParser, TestConstructor, Resolver): + def __init__(self, stream): + CParser.__init__(self, stream) + TestConstructor.__init__(self) + Resolver.__init__(self) + + +class YamlChecker: + """Wrapper around yamllint that supports YAML embedded in Ansible modules.""" + def __init__(self): + self.messages = [] + + def report(self): + """Print yamllint report to stdout.""" + report = dict( + messages=self.messages, + ) + + print(json.dumps(report, indent=4, sort_keys=True)) + + def check(self, paths): + """ + :type paths: str + """ + config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config') + + yaml_conf = YamlLintConfig(file=os.path.join(config_path, 'default.yml')) + module_conf = YamlLintConfig(file=os.path.join(config_path, 'modules.yml')) + plugin_conf = YamlLintConfig(file=os.path.join(config_path, 'plugins.yml')) + + for path in paths: + extension = os.path.splitext(path)[1] + + with open(path) as f: + contents = f.read() + + if extension in ('.yml', '.yaml'): + self.check_yaml(yaml_conf, path, contents) + elif extension == '.py': + if path.startswith('lib/ansible/modules/') or path.startswith('plugins/modules/'): + conf = module_conf + else: + conf = plugin_conf + + self.check_module(conf, path, contents) + else: + raise Exception('unsupported extension: %s' % extension) + + def check_yaml(self, conf, path, contents): + """ + :type conf: YamlLintConfig + :type path: str + :type contents: str + """ + self.check_parsable(path, contents) + self.messages += [self.result_to_message(r, path) for r in linter.run(contents, conf, path)] + + def check_module(self, conf, path, contents): + """ + :type conf: 
YamlLintConfig
+        :type path: str
+        :type contents: str
+        """
+        docs = self.get_module_docs(path, contents)
+
+        for key, value in docs.items():
+            yaml_data = value['yaml']
+            lineno = value['lineno']
+
+            if yaml_data.startswith('\n'):
+                yaml_data = yaml_data[1:]
+                lineno += 1
+
+            self.check_parsable(path, yaml_data, lineno)
+
+            messages = list(linter.run(yaml_data, conf, path))
+
+            self.messages += [self.result_to_message(r, path, lineno - 1, key) for r in messages]
+
+    def check_parsable(self, path, contents, lineno=1):
+        """
+        :type path: str
+        :type contents: str
+        :type lineno: int
+        """
+        try:
+            yaml.load(contents, Loader=TestLoader)
+        except MarkedYAMLError as e:
+            self.messages += [{'code': 'unparsable-with-libyaml',
+                               'message': '%s - %s' % (e.args[0], e.args[2]),
+                               'path': path,
+                               'line': e.problem_mark.line + lineno,
+                               'column': e.problem_mark.column + 1,
+                               'level': 'error',
+                               }]
+
+    @staticmethod
+    def result_to_message(result, path, line_offset=0, prefix=''):
+        """
+        :type result: any
+        :type path: str
+        :type line_offset: int
+        :type prefix: str
+        :rtype: dict[str, any]
+        """
+        if prefix:
+            prefix = '%s: ' % prefix
+
+        return dict(
+            code=result.rule or result.level,
+            message=prefix + result.desc,
+            path=path,
+            line=result.line + line_offset,
+            column=result.column,
+            level=result.level,
+        )
+
+    def get_module_docs(self, path, contents):
+        """
+        :type path: str
+        :type contents: str
+        :rtype: dict[str, any]
+        """
+        module_doc_types = [
+            'DOCUMENTATION',
+            'EXAMPLES',
+            'RETURN',
+        ]
+
+        docs = {}
+
+        def check_assignment(statement, doc_types=None):
+            """Check the given statement for a documentation assignment."""
+            for target in statement.targets:
+                if not isinstance(target, ast.Name):
+                    continue
+
+                if doc_types and target.id not in doc_types:
+                    continue
+
+                docs[target.id] = dict(
+                    yaml=statement.value.s,
+                    lineno=statement.lineno,
+                    end_lineno=statement.lineno + len(statement.value.s.splitlines())
+                )
+
+        module_ast = self.parse_module(path, contents)
+
+        if not module_ast:
+            return {}
+
+        is_plugin = path.startswith('lib/ansible/modules/') or path.startswith('lib/ansible/plugins/') or path.startswith('plugins/')
+        is_doc_fragment = path.startswith('lib/ansible/plugins/doc_fragments/') or path.startswith('plugins/doc_fragments/')
+
+        if is_plugin and not is_doc_fragment:
+            for body_statement in module_ast.body:
+                if isinstance(body_statement, ast.Assign):
+                    check_assignment(body_statement, module_doc_types)
+        elif is_doc_fragment:
+            for body_statement in module_ast.body:
+                if isinstance(body_statement, ast.ClassDef):
+                    for class_statement in body_statement.body:
+                        if isinstance(class_statement, ast.Assign):
+                            check_assignment(class_statement)
+        else:
+            raise Exception('unsupported path: %s' % path)
+
+        return docs
+
+    def parse_module(self, path, contents):
+        """
+        :type path: str
+        :type contents: str
+        :rtype: ast.Module | None
+        """
+        try:
+            return ast.parse(contents)
+        except SyntaxError as ex:
+            self.messages.append(dict(
+                code='python-syntax-error',
+                message=str(ex),
+                path=path,
+                line=ex.lineno,
+                column=ex.offset,
+                level='error',
+            ))
+        except Exception as ex:  # pylint: disable=broad-except
+            self.messages.append(dict(
+                code='python-parse-error',
+                message=str(ex),
+                path=path,
+                line=0,
+                column=0,
+                level='error',
+            ))
+
+        return None
+
+
+if __name__ == '__main__':
+    main()
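+
+# Example invocation (illustrative; run from the root of an ansible checkout):
+#   echo 'lib/ansible/modules/system/ping.py' | python yamllinter.py
+# prints a JSON report of yamllint findings for the YAML embedded in the
+# module's DOCUMENTATION, EXAMPLES and RETURN strings.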