summaryrefslogtreecommitdiffstats
path: root/hacking
diff options
context:
space:
mode:
Diffstat (limited to 'hacking')
-rwxr-xr-xhacking/build-ansible.py103
-rw-r--r--hacking/build_library/__init__.py0
-rw-r--r--hacking/build_library/build_ansible/__init__.py0
-rw-r--r--hacking/build_library/build_ansible/announce.py293
-rw-r--r--hacking/build_library/build_ansible/change_detection.py33
-rw-r--r--hacking/build_library/build_ansible/command_plugins/collection_meta.py72
-rw-r--r--hacking/build_library/build_ansible/command_plugins/docs_build.py255
-rw-r--r--hacking/build_library/build_ansible/command_plugins/dump_config.py82
-rw-r--r--hacking/build_library/build_ansible/command_plugins/dump_keywords.py121
-rw-r--r--hacking/build_library/build_ansible/command_plugins/file_deprecated_issues.py153
-rw-r--r--hacking/build_library/build_ansible/command_plugins/generate_man.py303
-rw-r--r--hacking/build_library/build_ansible/command_plugins/porting_guide.py138
-rw-r--r--hacking/build_library/build_ansible/command_plugins/release_announcement.py78
-rw-r--r--hacking/build_library/build_ansible/command_plugins/update_intersphinx.py101
-rw-r--r--hacking/build_library/build_ansible/commands.py50
-rw-r--r--hacking/build_library/build_ansible/errors.py19
-rwxr-xr-xhacking/test-module.py292
-rwxr-xr-xhacking/update-sanity-requirements.py112
18 files changed, 2205 insertions, 0 deletions
diff --git a/hacking/build-ansible.py b/hacking/build-ansible.py
new file mode 100755
index 0000000..c108c18
--- /dev/null
+++ b/hacking/build-ansible.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+# coding: utf-8
+# PYTHON_ARGCOMPLETE_OK
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+import argparse
+import os.path
+import sys
+
+from straight.plugin import load
+
+try:
+ import argcomplete
+except ImportError:
+ argcomplete = None
+
+
def build_lib_path(this_script=__file__):
    """Return the absolute path to the common build library directory."""
    script_dir = os.path.dirname(this_script)
    return os.path.abspath(os.path.join(script_dir, 'build_library'))
+
+
def ansible_lib_path(this_script=__file__):
    """Return path to the in-tree ansible library directory (``lib`` at the repo root).

    :arg this_script: Path of a script inside the ``hacking`` directory, used as the
        anchor for locating the repository root.  Defaults to this file.
    """
    hacking_dir = os.path.dirname(this_script)
    libdir = os.path.abspath(os.path.join(hacking_dir, '..', 'lib'))

    return libdir
+
+
# Make the in-tree ansible library and the build_library plugin package
# importable before the build_ansible import below.
sys.path.insert(0, ansible_lib_path())
sys.path.insert(0, build_lib_path())


from build_ansible import commands, errors
+
+
def create_arg_parser(program_name):
    """
    Creates a command line argument parser

    :arg program_name: The name of the script. Used in help texts
    """
    return argparse.ArgumentParser(
        prog=program_name,
        description="Implements utilities to build Ansible")
+
+
def main():
    """
    Start our run.

    "It all starts here"
    """
    # Discover all Command subclasses shipped as plugins under
    # build_ansible.command_plugins
    subcommands = load('build_ansible.command_plugins', subclasses=commands.Command)

    arg_parser = create_arg_parser(os.path.basename(sys.argv[0]))
    arg_parser.add_argument('--debug', dest='debug', required=False, default=False,
                            action='store_true',
                            help='Show tracebacks and other debugging information')
    subparsers = arg_parser.add_subparsers(title='Subcommands', dest='command',
                                           help='for help use build-ansible.py SUBCOMMANDS -h')
    # Let every plugin register its own subcommand parser
    subcommands.pipe('init_parser', subparsers.add_parser)

    if argcomplete:
        argcomplete.autocomplete(arg_parser)

    args = arg_parser.parse_args(sys.argv[1:])
    if args.command is None:
        print('Please specify a subcommand to run')
        sys.exit(1)

    # Find the plugin whose name matches the requested subcommand
    for subcommand in subcommands:
        if subcommand.name == args.command:
            command = subcommand
            break
    else:
        # Note: We should never trigger this because argparse should shield us from it
        print('Error: {0} was not a recognized subcommand'.format(args.command))
        sys.exit(1)

    try:
        retval = command.main(args)
    except (errors.DependencyError, errors.MissingUserInput, errors.InvalidUserInput) as e:
        # Expected user/environment errors print a short message; --debug re-raises
        # for the full traceback.
        print(e)
        if args.debug:
            raise
        sys.exit(2)

    sys.exit(retval)


if __name__ == '__main__':
    main()
diff --git a/hacking/build_library/__init__.py b/hacking/build_library/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/hacking/build_library/__init__.py
diff --git a/hacking/build_library/build_ansible/__init__.py b/hacking/build_library/build_ansible/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/hacking/build_library/build_ansible/__init__.py
diff --git a/hacking/build_library/build_ansible/announce.py b/hacking/build_library/build_ansible/announce.py
new file mode 100644
index 0000000..c245bfb
--- /dev/null
+++ b/hacking/build_library/build_ansible/announce.py
@@ -0,0 +1,293 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+import asyncio
+import datetime
+import hashlib
+
+import aiohttp
+from jinja2 import Environment, DictLoader
+
+
+VERSION_FRAGMENT = """
+{%- if versions | length > 1 %}
+ {% for version in versions %}
+ {% if loop.last %}and {{ pretty_version(version) }}{% else %}
+ {% if versions | length == 2 %}{{ pretty_version(version) }} {% else %}{{ pretty_version(version) }}, {% endif -%}
+ {% endif -%}
+ {% endfor -%}
+{%- else %}{{ pretty_version(versions[0]) }}{% endif -%}
+"""
+
+LONG_TEMPLATE = """
+{% set plural = False if versions | length == 1 else True %}
+{% set latest_ver = (versions | sort(attribute='ver_obj'))[-1] %}
+
+To: ansible-releases@redhat.com, ansible-devel@googlegroups.com, ansible-project@googlegroups.com, ansible-announce@googlegroups.com
+Subject: New release{% if plural %}s{% endif %}: {{ version_str }}
+
+{% filter wordwrap %}
+Hi all- we're happy to announce that the general release of {{ version_str }}{% if plural %} are{%- else %} is{%- endif %} now available!
+{% endfilter %}
+
+
+
+How to get it
+-------------
+
+{% for version in versions %}
+$ pip install ansible{% if is_ansible_base(version) %}-base{% endif %}=={{ version }} --user
+{% if not loop.last %}
+or
+{% endif %}
+{% endfor %}
+
+The tar.gz of the release{% if plural %}s{% endif %} can be found here:
+
+{% for version in versions %}
+* {{ pretty_version(version) }}
+{% if is_ansible_base(version) %}
+ https://pypi.python.org/packages/source/a/ansible-base/ansible-base-{{ version }}.tar.gz
+{% else %}
+ https://pypi.python.org/packages/source/a/ansible/ansible-{{ version }}.tar.gz
+{% endif %}
+ SHA256: {{ hashes[version] }}
+{% endfor %}
+
+
+What's new in {{ version_str }}
+{{ '-' * (14 + version_str | length) }}
+
+{% filter wordwrap %}
+{% if plural %}These releases are{% else %}This release is a{% endif %} maintenance release{% if plural %}s{% endif %} containing numerous bugfixes. The full {% if plural %} changelogs are{% else %} changelog is{% endif %} at:
+{% endfilter %}
+
+
+{% for version in versions %}
+* {{ version }}
+ https://github.com/ansible/ansible/blob/stable-{{ version.split('.')[:2] | join('.') }}/changelogs/CHANGELOG-v{{ version.split('.')[:2] | join('.') }}.rst
+{% endfor %}
+
+
+What's the schedule for future maintenance releases?
+----------------------------------------------------
+
+{% filter wordwrap %}
+Future maintenance releases will occur approximately every 3 weeks. So expect the next one around {{ next_release.strftime('%Y-%m-%d') }}.
+{% endfilter %}
+
+
+
+Porting Help
+------------
+
+{% filter wordwrap %}
+We've published a porting guide at
+https://docs.ansible.com/ansible/devel/porting_guides/porting_guide_{{ latest_ver.split('.')[:2] | join('.') }}.html to help migrate your content to {{ latest_ver.split('.')[:2] | join('.') }}.
+{% endfilter %}
+
+
+
+{% filter wordwrap %}
+If you discover any errors or if any of your working playbooks break when you upgrade to {{ latest_ver }}, please use the following link to report the regression:
+{% endfilter %}
+
+
+ https://github.com/ansible/ansible/issues/new/choose
+
+{% filter wordwrap %}
+In your issue, be sure to mention the version that works and the one that doesn't.
+{% endfilter %}
+
+
+Thanks!
+
+-{{ name }}
+
+""" # noqa for E501 (line length).
+# jinja2 is horrid about getting rid of extra newlines so we have to have a single per paragraph for
+# proper wrapping to occur
+
+SHORT_TEMPLATE = """
+{% set plural = False if versions | length == 1 else True %}
+{% set version = (versions|sort(attribute='ver_obj'))[-1] %}
+@ansible
+{{ version_str }}
+{% if plural %}
+ have
+{% else %}
+ has
+{% endif %}
+been released! Get
+{% if plural %}
+them
+{% else %}
+it
+{% endif %}
+on PyPI: pip install ansible{% if is_ansible_base(version) %}-base{% endif %}=={{ version }},
+the Ansible PPA on Launchpad, or GitHub. Happy automating!
+""" # noqa for E501 (line length).
+# jinja2 is horrid about getting rid of extra newlines so we have to have a single per paragraph for
+# proper wrapping to occur
+
# Shared Jinja2 environment rendering the announcement templates defined above.
# DictLoader maps logical template names onto the in-module template strings.
JINJA_ENV = Environment(
    loader=DictLoader({'long': LONG_TEMPLATE,
                       'short': SHORT_TEMPLATE,
                       'version_string': VERSION_FRAGMENT,
                       }),
    extensions=['jinja2.ext.i18n'],
    trim_blocks=True,
    lstrip_blocks=True,
)
+
+
async def calculate_hash_from_tarball(session, version):
    """Download the ansible-base sdist from PyPI and return its sha256 hexdigest."""
    url = ('https://pypi.python.org/packages/source/a/'
           f'ansible-base/ansible-base-{version}.tar.gz')
    response = await asyncio.create_task(session.get(url))

    digest = hashlib.sha256()
    # Stream the tarball in chunks so the whole file is never held in memory
    chunk = await response.content.read(1024)
    while chunk:
        digest.update(chunk)
        chunk = await response.content.read(1024)

    return digest.hexdigest()
+
+
async def parse_hash_from_file(session, version):
    """Fetch the pre-created sha256 file for this release and return the recorded hash.

    :arg session: aiohttp client session used to make the request.
    :arg version: ansible-base version string, e.g. ``'2.10.1'``.
    :raises ValueError: when the hash file names a different tarball than expected.
    """
    filename = f'ansible-base-{version}.tar.gz'
    # Fixed: the URL previously contained a garbled placeholder instead of the
    # release filename.  The .sha file lives next to the tarball on
    # releases.ansible.com and is named '<tarball>.sha'.
    hash_url = f'https://releases.ansible.com/ansible-base/{filename}.sha'
    hash_task = asyncio.create_task(session.get(hash_url))
    hash_response = await hash_task

    hash_content = await hash_response.read()
    # The .sha file format is: '<hexdigest>  <filename>'
    precreated_hash, precreated_filename = hash_content.split(None, 1)
    if filename != precreated_filename.strip().decode('utf-8'):
        raise ValueError(f'Hash file contains hash for a different file: {precreated_filename}')

    return precreated_hash.decode('utf-8')
+
+
async def get_hash(session, version):
    """Return the sha256 of the release tarball, cross-checked against the published .sha file.

    :raises ValueError: when the freshly computed hash and the published hash disagree.
    """
    from_tarball = await calculate_hash_from_tarball(session, version)
    from_sha_file = await parse_hash_from_file(session, version)

    if from_tarball != from_sha_file:
        raise ValueError(f'Hash in file ansible-base-{version}.tar.gz.sha {from_sha_file} does not'
                         f' match hash of tarball from pypi {from_tarball}')

    return from_tarball
+
+
async def get_hashes(versions):
    """Concurrently retrieve the verified sha256 hash for each version.

    :arg versions: iterable of version strings.
    :returns: dict mapping each version to its sha256 hexdigest.
    """
    async with aiohttp.ClientSession() as aio_session:
        # Kick off every download before awaiting any of them
        tasks = {version: asyncio.create_task(get_hash(aio_session, version))
                 for version in versions}
        return {version: await task for version, task in tasks.items()}
+
+
def next_release_date(weeks=3):
    """Return the expected date of the next release, ``weeks`` weeks from now.

    Releases happen on Thursdays, so today's date is first snapped to a Thursday
    before the offset is added.
    """
    anchor = datetime.datetime.now()
    weekday = anchor.weekday()  # Monday == 0 ... Sunday == 6; Thursday == 3

    if weekday == 4:
        # Friday: treat yesterday (Thursday) as the release day
        anchor -= datetime.timedelta(days=1)
    elif weekday < 3:
        # Mon-Wed: slide forward to this week's Thursday
        anchor += datetime.timedelta(days=3 - weekday)
    elif weekday > 4:
        # Sat/Sun: slide forward to next week's Thursday
        anchor += datetime.timedelta(days=10 - weekday)
    # weekday == 3 (already Thursday) needs no adjustment

    return anchor + datetime.timedelta(days=weeks * 7)
+
+
def is_ansible_base(version):
    '''
    Determines if a version is an ansible-base version or not, by checking
    if it is >= 2.10.0. Stops comparing when it gets to the first non-numeric
    component to allow for .dev and .beta suffixes.

    :arg version: version string such as ``'2.10.0'``, ``'2.10.0rc1'`` or ``'2.10.0b1'``.
    :returns: True when the numeric portion compares >= (2, 10, 0).
    '''
    # Ignore .beta/.dev suffixes
    ver_split = []
    for component in version.split('.'):
        if not component.isdigit():
            # Strip pre-release markers so e.g. '0rc1' and '0b1' compare as 0.
            # Fixed: these used to be two independent 'if's, so a component
            # containing both 'rc' and 'b' would have been appended twice.
            if 'rc' in component:
                ver_split.append(int(component.split('rc')[0]))
            elif 'b' in component:
                ver_split.append(int(component.split('b')[0]))
            continue
        ver_split.append(int(component))
    return tuple(ver_split) >= (2, 10, 0)
+
+
# Currently only use with a single element list, but left general for later
# in case we need to refer to the releases collectively.
def release_variants(versions):
    """Return a human-readable package name covering all of the given versions."""
    base_flags = [is_ansible_base(v) for v in versions]
    if all(base_flags):
        return 'ansible-base'
    if not any(base_flags):
        return 'Ansible'
    return 'Ansible and ansible-base'
+
+
def pretty_version(version):
    """Return '<package name> <version>' for a single version string."""
    return f'{release_variants([version])} {version}'
+
+
def create_long_message(versions, name):
    """Render the long-form release announcement email.

    :arg versions: list of version strings being announced.
    :arg name: name of the release manager, used for the email signature.
    :returns: the rendered announcement as a string.
    """
    # Download and verify the sha256 for every release tarball up front
    hashes = asyncio.run(get_hashes(versions))

    version_template = JINJA_ENV.get_template('version_string')
    version_str = version_template.render(versions=versions,
                                          pretty_version=pretty_version).strip()

    next_release = next_release_date()

    template = JINJA_ENV.get_template('long')
    message = template.render(versions=versions, version_str=version_str,
                              name=name, hashes=hashes, next_release=next_release,
                              is_ansible_base=is_ansible_base,
                              pretty_version=pretty_version)
    return message
+
+
def create_short_message(versions):
    """Render the short (social-media) release announcement."""
    version_str = JINJA_ENV.get_template('version_string').render(
        versions=versions, pretty_version=pretty_version).strip()

    rendered = JINJA_ENV.get_template('short').render(
        versions=versions, version_str=version_str,
        is_ansible_base=is_ansible_base, pretty_version=pretty_version)
    # Collapse all whitespace runs into single spaces and end with a newline
    return ' '.join(rendered.split()) + '\n'
diff --git a/hacking/build_library/build_ansible/change_detection.py b/hacking/build_library/build_ansible/change_detection.py
new file mode 100644
index 0000000..22e21d3
--- /dev/null
+++ b/hacking/build_library/build_ansible/change_detection.py
@@ -0,0 +1,33 @@
+# Copyright: (c) 2018, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
def update_file_if_different(filename, b_data):
    """
    Replace file content only if content is different.

    This preserves timestamps in case the file content has not changed. It performs multiple
    operations on the file so it is not atomic and may be slower than simply writing to the file.

    :arg filename: The filename to write to
    :arg b_data: Byte string containing the data to write to the file
    :returns: True when the file was (re)written, False when it already held b_data
    """
    try:
        with open(filename, 'rb') as f:
            b_data_old = f.read()
    except FileNotFoundError:
        # File did not exist (this replaces the old magic `errno != 2` check);
        # set b_data_old to a sentinel value so that b_data gets written out.
        b_data_old = None

    if b_data_old != b_data:
        with open(filename, 'wb') as f:
            f.write(b_data)
        return True

    return False
diff --git a/hacking/build_library/build_ansible/command_plugins/collection_meta.py b/hacking/build_library/build_ansible/command_plugins/collection_meta.py
new file mode 100644
index 0000000..41b1077
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/collection_meta.py
@@ -0,0 +1,72 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import os.path
+import pathlib
+
+import yaml
+from ansible.module_utils.six import string_types
+from ansible.module_utils._text import to_bytes
+from antsibull_docs.jinja2.environment import doc_environment
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+
+
# Default template name and its directory (docs/templates, four levels above
# this plugin file).
DEFAULT_TEMPLATE_FILE = 'collections_galaxy_meta.rst.j2'
DEFAULT_TEMPLATE_DIR = pathlib.Path(__file__).parents[4] / 'docs/templates'
+
+
def normalize_options(options):
    """Normalize the options in place to make for easy templating.

    A bare-string ``description`` is wrapped in a single-element list so
    templates can always iterate over it.

    :arg options: list of option dicts, each with a ``description`` key.
    """
    for opt in options:
        description = opt['description']
        # isinstance against str replaces six.string_types: this file tree is
        # Python 3 only, where the two checks are equivalent.
        if isinstance(description, str):
            opt['description'] = [description]
+
+
class DocumentCollectionMeta(Command):
    """Subcommand that renders the collection galaxy.yml docs from shared metadata."""
    name = 'collection-meta'

    @classmethod
    def init_parser(cls, add_parser):
        """Register this subcommand's argument parser with the main CLI."""
        parser = add_parser(cls.name, description='Generate collection galaxy.yml documentation from shared metadata')
        parser.add_argument("-t", "--template-file", action="store", dest="template_file",
                            default=DEFAULT_TEMPLATE_FILE,
                            help="Jinja2 template to use for the config")
        parser.add_argument("-T", "--template-dir", action="store", dest="template_dir",
                            default=str(DEFAULT_TEMPLATE_DIR),
                            help="directory containing Jinja2 templates")
        parser.add_argument("-o", "--output-dir", action="store", dest="output_dir", default='/tmp/',
                            help="Output directory for rst files")
        parser.add_argument("collection_defs", metavar="COLLECTION-OPTION-DEFINITIONS.yml", type=str,
                            help="Source for collection metadata option docs")

    @staticmethod
    def main(args):
        """Render the rst output from the YAML option definitions; returns 0 on success."""
        output_dir = os.path.abspath(args.output_dir)
        template_file_full_path = os.path.abspath(os.path.join(args.template_dir, args.template_file))
        template_file = os.path.basename(template_file_full_path)
        template_dir = os.path.dirname(template_file_full_path)

        with open(args.collection_defs) as f:
            options = yaml.safe_load(f)

        normalize_options(options)

        env = doc_environment(template_dir)

        template = env.get_template(template_file)
        # Strip the .j2 suffix to get the output filename
        output_name = os.path.join(output_dir, template_file.replace('.j2', ''))
        temp_vars = {'options': options}

        data = to_bytes(template.render(temp_vars))
        # Only rewrite the output when the content changed, preserving timestamps
        update_file_if_different(output_name, data)

        return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/docs_build.py b/hacking/build_library/build_ansible/command_plugins/docs_build.py
new file mode 100644
index 0000000..50b0f90
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/docs_build.py
@@ -0,0 +1,255 @@
+# coding: utf-8
+# Copyright: (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import absolute_import, division, print_function
+
+import glob
+import os
+import os.path
+import pathlib
+import shutil
+from tempfile import TemporaryDirectory
+
+import yaml
+
+from ansible.release import __version__ as ansible_core__version__
+
+# Pylint doesn't understand Python3 namespace modules.
+# pylint: disable=relative-beyond-top-level
+from ..commands import Command
+from ..errors import InvalidUserInput, MissingUserInput
+# pylint: enable=relative-beyond-top-level
+
+
+__metaclass__ = type
+
+
# Repository root and default docsite output dir, located relative to this file
DEFAULT_TOP_DIR = pathlib.Path(__file__).parents[4]
DEFAULT_OUTPUT_DIR = pathlib.Path(__file__).parents[4] / 'docs/docsite'
+
+
class NoSuchFile(Exception):
    """An expected file was not found.

    Raised by the find_latest_* helpers below when required build data is missing.
    """
+
+
+#
+# Helpers
+#
+
def find_latest_ansible_dir(build_data_working):
    """Find the most recent ansible major version.

    :arg build_data_working: path to an ansible-build-data checkout.
    :returns: path of the newest version-named directory containing ansible.in.
    :raises NoSuchFile: when no suitable directory exists.
    """
    # imports here so that they don't cause unnecessary deps for all of the plugins
    from packaging.version import InvalidVersion, Version

    # Version directories are named like '2.10', '3', etc.
    ansible_directories = glob.glob(os.path.join(build_data_working, '[0-9.]*'))

    # Find the latest ansible version directory
    latest = None
    latest_ver = Version('0')
    for directory_name in (d for d in ansible_directories if os.path.isdir(d)):
        try:
            new_version = Version(os.path.basename(directory_name))
        except InvalidVersion:
            # Skip directories whose names don't parse as versions
            continue

        # For the devel build, we only need ansible.in, so make sure it's there
        if not os.path.exists(os.path.join(directory_name, 'ansible.in')):
            continue

        if new_version > latest_ver:
            latest_ver = new_version
            latest = directory_name

    if latest is None:
        raise NoSuchFile('Could not find an ansible data directory in {0}'.format(build_data_working))

    return latest
+
+
def parse_deps_file(filename):
    """Parse an antsibull .deps file into a dict of key -> value strings."""
    entries = {}
    with open(filename, 'r', encoding='utf-8') as f:
        for raw_line in f:
            stripped = raw_line.strip()
            # Skip blanks and comment lines
            if not stripped or stripped.startswith('#'):
                continue
            key, value = (part.strip() for part in stripped.split(':', 1))
            entries[key] = value
    return entries
+
+
def write_deps_file(filename, deps_data):
    """Write an antsibull .deps file."""
    lines = [f'{key}: {value}\n' for key, value in deps_data.items()]
    with open(filename, 'w', encoding='utf-8') as f:
        f.writelines(lines)
+
+
def find_latest_deps_file(build_data_working, ansible_version):
    """Find the most recent ansible deps file for the given ansible major version.

    :arg build_data_working: path to an ansible-build-data checkout.
    :arg ansible_version: the major version directory to search, e.g. '2.10'.
    :raises Exception: when the version directory contains no .deps files at all.
    :raises NoSuchFile: when no deps file could be selected.
    """
    # imports here so that they don't cause unnecessary deps for all of the plugins
    from packaging.version import Version

    data_dir = os.path.join(build_data_working, ansible_version)
    deps_files = glob.glob(os.path.join(data_dir, '*.deps'))
    if not deps_files:
        raise Exception('No deps files exist for version {0}'.format(ansible_version))

    # Find the latest version of the deps file for this major version
    latest = None
    latest_ver = Version('0')
    for filename in deps_files:
        deps_data = parse_deps_file(filename)
        # The full release version is recorded inside each deps file
        new_version = Version(deps_data['_ansible_version'])
        if new_version > latest_ver:
            latest_ver = new_version
            latest = filename

    if latest is None:
        raise NoSuchFile('Could not find an ansible deps file in {0}'.format(data_dir))

    return latest
+
+
+#
+# Subcommand core
+#
+
def generate_core_docs(args):
    """Generate the plugin rst for ansible-core, using this checkout as the source.

    :arg args: parsed CLI namespace; uses ``top_dir`` and ``output_dir``.
    :returns: antsibull-docs exit code.
    """
    # imports here so that they don't cause unnecessary deps for all of the plugins
    from antsibull_docs.cli import antsibull_docs

    with TemporaryDirectory() as tmp_dir:
        #
        # Construct a deps file with our version of ansible_core in it
        #
        modified_deps_file = os.path.join(tmp_dir, 'ansible.deps')

        # The _ansible_version doesn't matter since we're only building docs for core
        deps_file_contents = {'_ansible_version': ansible_core__version__,
                              '_ansible_core_version': ansible_core__version__}

        with open(modified_deps_file, 'w') as f:
            f.write(yaml.dump(deps_file_contents))

        # Generate the plugin rst
        return antsibull_docs.run(['antsibull-docs', 'stable', '--deps-file', modified_deps_file,
                                   '--ansible-core-source', str(args.top_dir),
                                   '--dest-dir', args.output_dir])

    # If we make this more than just a driver for antsibull:
    # Run other rst generation
    # Run sphinx build
+
+ # If we make this more than just a driver for antsibull:
+ # Run other rst generation
+ # Run sphinx build
+
+
+#
+# Subcommand full
+#
+
def generate_full_docs(args):
    """Generate the plugin rst for a full ansible community package release.

    :arg args: parsed CLI namespace; uses ``ansible_version``, ``ansible_build_data``,
        ``top_dir`` and ``output_dir``.
    :returns: antsibull-docs exit code.
    """
    # imports here so that they don't cause unnecessary deps for all of the plugins
    import sh
    from antsibull_docs.cli import antsibull_docs

    with TemporaryDirectory() as tmp_dir:
        sh.git(['clone', 'https://github.com/ansible-community/ansible-build-data'], _cwd=tmp_dir)
        # If we want to validate that the ansible version and ansible-core branch version match,
        # this would be the place to do it.

        build_data_working = os.path.join(tmp_dir, 'ansible-build-data')
        if args.ansible_build_data:
            # Prefer a local checkout over the fresh clone (debugging aid)
            build_data_working = args.ansible_build_data

        ansible_version = args.ansible_version
        if ansible_version is None:
            # No version requested: build devel docs from the newest version directory
            ansible_version = find_latest_ansible_dir(build_data_working)
            params = ['devel', '--pieces-file', os.path.join(ansible_version, 'ansible.in')]
        else:
            latest_filename = find_latest_deps_file(build_data_working, ansible_version)

            # Make a copy of the deps file so that we can set the ansible-core version we'll use
            modified_deps_file = os.path.join(tmp_dir, 'ansible.deps')
            shutil.copyfile(latest_filename, modified_deps_file)

            # Put our version of ansible-core into the deps file
            deps_data = parse_deps_file(modified_deps_file)

            deps_data['_ansible_core_version'] = ansible_core__version__

            # antsibull-docs will choke when a key `_python` is found. Remove it to work around
            # that until antsibull-docs is fixed.
            deps_data.pop('_python', None)

            write_deps_file(modified_deps_file, deps_data)

            params = ['stable', '--deps-file', modified_deps_file]

        # Generate the plugin rst
        return antsibull_docs.run(['antsibull-docs'] + params +
                                  ['--ansible-core-source', str(args.top_dir),
                                   '--dest-dir', args.output_dir])

    # If we make this more than just a driver for antsibull:
    # Run other rst generation
    # Run sphinx build
+
+ # If we make this more than just a driver for antsibull:
+ # Run other rst generation
+ # Run sphinx build
+
+
class CollectionPluginDocs(Command):
    """Subcommand that drives plugin documentation generation via antsibull-docs."""
    name = 'docs-build'
    _ACTION_HELP = """Action to perform.
        full: Regenerate the rst for the full ansible website.
        core: Regenerate the rst for plugins in ansible-core and then build the website.
        named: Regenerate the rst for the named plugins and then build the website.
    """

    @classmethod
    def init_parser(cls, add_parser):
        """Register the docs-build argument parser with the main CLI."""
        parser = add_parser(cls.name,
                            description='Generate documentation for plugins in collections.'
                            ' Plugins in collections will have a stub file in the normal plugin'
                            ' documentation location that says the module is in a collection and'
                            ' point to generated plugin documentation under the collections/'
                            ' hierarchy.')
        # I think we should make the actions a subparser but need to look in git history and see if
        # we tried that and changed it for some reason.
        parser.add_argument('action', action='store', choices=('full', 'core', 'named'),
                            default='full', help=cls._ACTION_HELP)
        parser.add_argument("-o", "--output-dir", action="store", dest="output_dir",
                            default=DEFAULT_OUTPUT_DIR,
                            help="Output directory for generated doc files")
        parser.add_argument("-t", "--top-dir", action="store", dest="top_dir",
                            default=DEFAULT_TOP_DIR,
                            help="Toplevel directory of this ansible-core checkout or expanded"
                                 " tarball.")
        parser.add_argument("-l", "--limit-to-modules", '--limit-to', action="store",
                            dest="limit_to", default=None,
                            help="Limit building module documentation to comma-separated list of"
                                 " plugins. Specify non-existing plugin name for no plugins.")
        parser.add_argument('--ansible-version', action='store',
                            dest='ansible_version', default=None,
                            help='The version of the ansible package to make documentation for.'
                                 ' This only makes sense when used with full.')
        parser.add_argument('--ansible-build-data', action='store',
                            dest='ansible_build_data', default=None,
                            help='A checkout of the ansible-build-data repo. Useful for'
                                 ' debugging.')

    @staticmethod
    def main(args):
        """Validate CLI args and dispatch to the requested generation action."""
        # normalize and validate CLI args

        if args.ansible_version and args.action != 'full':
            raise InvalidUserInput('--ansible-version is only for use with "full".')

        if not args.output_dir:
            args.output_dir = os.path.abspath(str(DEFAULT_OUTPUT_DIR))

        if args.action == 'full':
            return generate_full_docs(args)

        if args.action == 'core':
            return generate_core_docs(args)
        # args.action == 'named' (Invalid actions are caught by argparse)
        raise NotImplementedError('Building docs for specific files is not yet implemented')

        # return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/dump_config.py b/hacking/build_library/build_ansible/command_plugins/dump_config.py
new file mode 100644
index 0000000..33591b4
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/dump_config.py
@@ -0,0 +1,82 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import os.path
+import pathlib
+
+import yaml
+from jinja2 import Environment, FileSystemLoader
+from ansible.module_utils._text import to_bytes
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+
+
# Default template name and its directory (docs/templates, four levels above
# this plugin file).
DEFAULT_TEMPLATE_FILE = 'config.rst.j2'
DEFAULT_TEMPLATE_DIR = pathlib.Path(__file__).parents[4] / 'docs/templates'
+
+
def fix_description(config_options):
    '''some descriptions are strings, some are lists. workaround it...'''
    # Iterate over a snapshot of the keys since we delete entries as we go
    for config_key in list(config_options.keys()):
        # drop internal entries
        if config_key.startswith('_'):
            del config_options[config_key]
            continue

        description = config_options[config_key].get('description', [])
        # Coerce every description to a list so templates can always iterate
        if not isinstance(description, list):
            description = [description]
        config_options[config_key]['description'] = description
    return config_options
+
+
class DocumentConfig(Command):
    """Subcommand that renders the base configuration docs from metadata."""
    name = 'document-config'

    @classmethod
    def init_parser(cls, add_parser):
        """Register this subcommand's argument parser with the main CLI."""
        parser = add_parser(cls.name, description='Generate module documentation from metadata')
        parser.add_argument("-t", "--template-file", action="store", dest="template_file",
                            default=DEFAULT_TEMPLATE_FILE,
                            help="Jinja2 template to use for the config")
        parser.add_argument("-T", "--template-dir", action="store", dest="template_dir",
                            default=str(DEFAULT_TEMPLATE_DIR),
                            help="directory containing Jinja2 templates")
        parser.add_argument("-o", "--output-dir", action="store", dest="output_dir", default='/tmp/',
                            help="Output directory for rst files")
        parser.add_argument("config_defs", metavar="CONFIG-OPTION-DEFINITIONS.yml", type=str,
                            help="Source for config option docs")

    @staticmethod
    def main(args):
        """Render the config rst from the YAML definitions; returns 0 on success."""
        output_dir = os.path.abspath(args.output_dir)
        template_file_full_path = os.path.abspath(os.path.join(args.template_dir, args.template_file))
        template_file = os.path.basename(template_file_full_path)
        template_dir = os.path.dirname(template_file_full_path)

        with open(args.config_defs) as f:
            config_options = yaml.safe_load(f)

        config_options = fix_description(config_options)

        env = Environment(loader=FileSystemLoader(template_dir), trim_blocks=True,)
        template = env.get_template(template_file)
        # Strip the .j2 suffix to get the output filename
        output_name = os.path.join(output_dir, template_file.replace('.j2', ''))
        temp_vars = {'config_options': config_options}

        data = to_bytes(template.render(temp_vars))
        # Only rewrite the output when the content changed, preserving timestamps
        update_file_if_different(output_name, data)

        return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/dump_keywords.py b/hacking/build_library/build_ansible/command_plugins/dump_keywords.py
new file mode 100644
index 0000000..e937931
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/dump_keywords.py
@@ -0,0 +1,121 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import importlib
+import os.path
+import pathlib
+import re
+from ansible.module_utils.compat.version import LooseVersion
+
+import jinja2
+import yaml
+from jinja2 import Environment, FileSystemLoader
+
+from ansible.module_utils._text import to_bytes
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+
+
+DEFAULT_TEMPLATE_DIR = str(pathlib.Path(__file__).resolve().parents[4] / 'docs/templates')
+TEMPLATE_FILE = 'playbooks_keywords.rst.j2'
+PLAYBOOK_CLASS_NAMES = ['Play', 'Role', 'Block', 'Task']
+
+
def load_definitions(keyword_definitions_file):
    """Load playbook keyword descriptions from a YAML file.

    :arg keyword_definitions_file: path to a YAML file mapping keyword names
        to rst description strings.
    :returns: dict of keyword name to description.  An empty or all-comment
        YAML file yields an empty dict instead of None.
    """
    with open(keyword_definitions_file) as f:
        # yaml.safe_load() returns None for an empty document; normalize to {}
        # so callers can safely do membership tests and iteration.
        docs = yaml.safe_load(f) or {}
    return docs
+
+
def extract_keywords(keyword_definitions):
    """Map each playbook class to its usable keywords and their descriptions.

    :arg keyword_definitions: dict of keyword name to rst description text.
    :returns: dict keyed by playbook class name ('Play', 'Role', 'Block',
        'Task'); each value maps a keyword to its description string, or to a
        placeholder marker when no description (or alias description) exists.
    """
    pb_keywords = {}
    for pb_class_name in PLAYBOOK_CLASS_NAMES:
        # The Play class lives directly in the ansible.playbook package; the
        # other classes live in submodules named after the lowercased class.
        if pb_class_name == 'Play':
            module_name = 'ansible.playbook'
        else:
            module_name = 'ansible.playbook.{0}'.format(pb_class_name.lower())
        module = importlib.import_module(module_name)
        playbook_class = getattr(module, pb_class_name, None)
        if playbook_class is None:
            raise ImportError("We weren't able to import the module {0}".format(module_name))

        # Maintain order of the actual class names for our output
        # Build up a mapping of playbook classes to the attributes that they hold
        pb_keywords[pb_class_name] = {k: v for (k, v) in playbook_class.fattributes.items()
                                      # Filter private attributes as they're not usable in playbooks
                                      if not v.private}

        # pick up definitions if they exist
        # (iterate over a tuple copy because the dict is mutated in the loop)
        for keyword in tuple(pb_keywords[pb_class_name]):
            if keyword in keyword_definitions:
                pb_keywords[pb_class_name][keyword] = keyword_definitions[keyword]
            else:
                # check if there is an alias, otherwise undocumented
                alias = getattr(playbook_class.fattributes.get(keyword), 'alias', None)
                if alias and alias in keyword_definitions:
                    # Documented under the alias: re-key the entry to the alias name.
                    pb_keywords[pb_class_name][alias] = keyword_definitions[alias]
                    del pb_keywords[pb_class_name][keyword]
                else:
                    pb_keywords[pb_class_name][keyword] = ' UNDOCUMENTED!! '

        # loop is really with_ for users
        if pb_class_name == 'Task':
            pb_keywords[pb_class_name]['with_<lookup_plugin>'] = (
                'The same as ``loop`` but magically adds the output of any lookup plugin to'
                ' generate the item list.')

        # local_action is implicit with action
        if 'action' in pb_keywords[pb_class_name]:
            pb_keywords[pb_class_name]['local_action'] = ('Same as action but also implies'
                                                          ' ``delegate_to: localhost``')

    return pb_keywords
+
+
def generate_page(pb_keywords, template_dir):
    """Render the playbook keywords rst page from its Jinja2 template."""
    loader = FileSystemLoader(template_dir)
    template = Environment(loader=loader, trim_blocks=True,).get_template(TEMPLATE_FILE)

    rendered = template.render({'pb_keywords': pb_keywords,
                                'playbook_class_names': PLAYBOOK_CLASS_NAMES})

    # jinja2 < 2.10's indent filter indents blank lines. Cleanup
    if LooseVersion(jinja2.__version__) < LooseVersion('2.10'):
        rendered = re.sub(' +\n', '\n', rendered)

    return rendered
+
+
class DocumentKeywords(Command):
    """Subcommand that builds the playbook keyword reference page."""

    name = 'document-keywords'

    @classmethod
    def init_parser(cls, add_parser):
        parser = add_parser(cls.name, description='Generate playbook keyword documentation from'
                            ' code and descriptions')
        parser.add_argument("-T", "--template-dir", action="store", dest="template_dir",
                            default=DEFAULT_TEMPLATE_DIR,
                            help="directory containing Jinja2 templates")
        parser.add_argument("-o", "--output-dir", action="store", dest="output_dir",
                            default='/tmp/', help="Output directory for rst files")
        parser.add_argument("keyword_defs", metavar="KEYWORD-DEFINITIONS.yml", type=str,
                            help="Source for playbook keyword docs")

    @staticmethod
    def main(args):
        # Merge the YAML descriptions with the keywords discovered on the
        # playbook classes, then render the page and write it out.
        definitions = load_definitions(args.keyword_defs)
        keywords_by_class = extract_keywords(definitions)

        page = generate_page(keywords_by_class, args.template_dir)
        destination = os.path.join(args.output_dir, TEMPLATE_FILE.replace('.j2', ''))
        update_file_if_different(destination, to_bytes(page))

        return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/file_deprecated_issues.py b/hacking/build_library/build_ansible/command_plugins/file_deprecated_issues.py
new file mode 100644
index 0000000..139ecc4
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/file_deprecated_issues.py
@@ -0,0 +1,153 @@
+# -*- coding: utf-8 -*-
+# (c) 2017, Matt Martz <matt@sivel.net>
+# (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+import argparse
+import os
+import time
+
+from collections import defaultdict
+
+from ansible.release import __version__ as ansible_version
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+from .. import errors # pylint: disable=relative-beyond-top-level
+
+ANSIBLE_MAJOR_VERSION = '.'.join(ansible_version.split('.')[:2])
+
+
def get_token(token_file):
    """Return a github oauth token, preferring a token file over the environment.

    :arg token_file: open file object containing the token, or None to fall
        back to the GITHUB_TOKEN environment variable.
    :returns: the stripped token string.
    :raises errors.MissingUserInput: when no token can be found either way.
    """
    if token_file:
        return token_file.read().strip()

    # os.getenv() returns None when the variable is unset; default to '' so
    # .strip() cannot raise AttributeError and the friendly missing-input
    # error below is raised instead.
    token = os.getenv('GITHUB_TOKEN', '').strip()
    if not token:
        raise errors.MissingUserInput(
            'Please provide a file containing a github oauth token with public_repo scope'
            ' via the --github-token argument or set the GITHUB_TOKEN env var with your'
            ' github oauth token'
        )
    return token
+
+
def parse_deprecations(problems_file_handle):
    """Group pylint deprecation findings by the component they belong to.

    :arg problems_file_handle: open file of pylint output, one finding per
        line, each beginning with 'path:'.
    :returns: dict mapping component name to a list of dicts with 'title',
        'path' and 'line' keys.
    """
    deprecated = defaultdict(list)
    for line in problems_file_handle.read().splitlines():
        path = line.split(':')[0]
        if path.endswith('__init__.py'):
            # Packages are identified by their directory name.
            component = os.path.basename(os.path.dirname(path))
        else:
            # Modules are identified by their stem, minus any leading underscore.
            component = os.path.splitext(os.path.basename(path).lstrip('_'))[0]

        title = ('%s contains deprecated call to be removed in %s' %
                 (component, ANSIBLE_MAJOR_VERSION))
        deprecated[component].append(dict(title=title, path=path, line=line))

    return deprecated
+
+
def find_project_todo_column(repo, project_name):
    """Locate the 'todo'-style column of a named github project.

    :arg repo: github3.py repository object.
    :arg project_name: lowercase name of the project to look for.
    :returns: the first project column whose name looks like a todo/backlog column.
    :raises errors.InvalidUserInput: when no project matches project_name.
    :raises Exception: when the matched project has no recognizable todo column.
    """
    target = None
    for candidate in repo.projects():
        if candidate.name.lower() == project_name:
            target = candidate
            break
    if target is None:
        raise errors.InvalidUserInput('%s was an invalid project name' % project_name)

    for column in target.columns():
        lowered = column.name.lower()
        if any(marker in lowered for marker in ('todo', 'backlog', 'to do')):
            return column

    raise Exception('Unable to determine the todo column in'
                    ' project %s' % project_name)
+
+
def create_issues(deprecated, body_tmpl, repo):
    """Open one github issue per deprecated component.

    :arg deprecated: dict mapping component name to a list of finding dicts
        (as produced by parse_deprecations).
    :arg body_tmpl: %-style template string for the issue body.
    :arg repo: github3.py repository object to open the issues against.
    :returns: list of the created issue objects.
    """
    issues = []
    for component, findings in deprecated.items():
        paths = '\n'.join(set((f['path']) for f in findings))
        lines = '\n'.join(f['line'] for f in findings)
        body = body_tmpl % dict(component=component, path=paths,
                                line=lines,
                                version=ANSIBLE_MAJOR_VERSION)

        issue = repo.create_issue(findings[0]['title'], body=body, labels=['deprecated'])
        print(issue)
        issues.append(issue)

        # Sleep a little, so that the API doesn't block us
        time.sleep(0.5)

    return issues
+
+
class FileDeprecationTickets(Command):
    """Subcommand that files github issues for deprecated calls found by pylint."""

    name = 'file-deprecation-tickets'

    @classmethod
    def init_parser(cls, add_parser):
        parser = add_parser(cls.name, description='File tickets to cleanup deprecated features for'
                            ' the next release')
        parser.add_argument('--template', default='deprecated_issue_template.md',
                            type=argparse.FileType('r'),
                            help='Path to markdown file template to be used for issue '
                                 'body. Default: %(default)s')
        parser.add_argument('--project-name', default='', type=str,
                            help='Name of a github project to assign all issues to')
        parser.add_argument('--github-token', type=argparse.FileType('r'),
                            help='Path to file containing a github token with public_repo scope.'
                                 ' This token in this file will be used to open the deprecation'
                                 ' tickets and add them to the github project. If not given,'
                                 ' the GITHUB_TOKEN environment variable will be tried')
        parser.add_argument('problems', type=argparse.FileType('r'),
                            help='Path to file containing pylint output for the '
                                 'ansible-deprecated-version check')

    @staticmethod
    def main(args):
        # github3.py is an optional dependency; import it lazily so the other
        # subcommands keep working without it.
        try:
            from github3 import GitHub
        except ImportError:
            raise errors.DependencyError(
                'This command needs the github3.py library installed to work'
            )

        token = get_token(args.github_token)
        # --github-token is optional; when the token came from the environment
        # there is no file object to close (previously this raised
        # AttributeError on None).
        if args.github_token:
            args.github_token.close()

        deprecated = parse_deprecations(args.problems)
        args.problems.close()

        body_tmpl = args.template.read()
        args.template.close()

        project_name = args.project_name.strip().lower()

        gh_conn = GitHub(token=token)
        repo = gh_conn.repository('abadger', 'ansible')

        # Initialize so the check below is always defined (previously this
        # raised UnboundLocalError when no --project-name was given).
        project_column = None
        if project_name:
            project_column = find_project_todo_column(repo, project_name)

        issues = create_issues(deprecated, body_tmpl, repo)

        if project_column:
            for issue in issues:
                project_column.create_card_with_issue(issue)
                # Throttle so the API doesn't block us.
                time.sleep(0.5)

        return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/generate_man.py b/hacking/build_library/build_ansible/command_plugins/generate_man.py
new file mode 100644
index 0000000..3795c0d
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/generate_man.py
@@ -0,0 +1,303 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+import argparse
+import os.path
+import pathlib
+import sys
+
+from jinja2 import Environment, FileSystemLoader
+
+from ansible.module_utils._text import to_bytes
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+
+
+DEFAULT_TEMPLATE_FILE = pathlib.Path(__file__).parents[4] / 'docs/templates/man.j2'
+
+
# from https://www.python.org/dev/peps/pep-0257/
def trim_docstring(docstring):
    """Normalize docstring indentation per the PEP 257 algorithm.

    Tabs are expanded, the common leading indentation of all lines after the
    first is removed, and leading/trailing blank lines are stripped.
    """
    if not docstring:
        return ''
    lines = docstring.expandtabs().splitlines()

    # Find the smallest indentation among the non-blank continuation lines;
    # the first line hangs directly after the opening quotes, so skip it.
    margin = sys.maxsize
    for line in lines[1:]:
        stripped = line.lstrip()
        if stripped:
            margin = min(margin, len(line) - len(stripped))

    trimmed = [lines[0].strip()]
    if margin < sys.maxsize:
        trimmed.extend(line[margin:].rstrip() for line in lines[1:])

    # Drop blank lines at either end.
    while trimmed and not trimmed[-1]:
        trimmed.pop()
    while trimmed and not trimmed[0]:
        trimmed.pop(0)

    return '\n'.join(trimmed)
+
+
def get_options(optlist):
    ''' get actual options '''

    opts = []
    for action in optlist:
        entry = {
            'desc': action.help,
            'options': action.option_strings
        }
        if isinstance(action, argparse._StoreAction):
            # Store actions consume an argument; document it via its dest name.
            entry['arg'] = action.dest.upper()
        elif not entry['options']:
            # Non-store actions without option strings have nothing to show.
            continue
        opts.append(entry)

    return opts
+
+
+def dedupe_groups(parser):
+ action_groups = []
+ for action_group in parser._action_groups:
+ found = False
+ for a in action_groups:
+ if a._actions == action_group._actions:
+ found = True
+ break
+ if not found:
+ action_groups.append(action_group)
+ return action_groups
+
+
def get_option_groups(option_parser):
    """Describe each non-default argument group of the parser.

    The first (shared/default) deduplicated group is skipped; every remaining
    group is summarized as a dict with its description, its actions, and the
    group object itself.
    """
    return [{'desc': group.description,
             'options': group._actions,
             'group_obj': group}
            for group in dedupe_groups(option_parser)[1:]]
+
+
def opt_doc_list(parser):
    ''' iterate over options lists '''

    # Collect the options from every deduplicated non-default group first,
    # then append the parser's own top-level actions.
    results = []
    for group in dedupe_groups(parser)[1:]:
        results.extend(get_options(group._actions))
    results.extend(get_options(parser._actions))

    return results
+
+
def opts_docs(cli_class_name, cli_module_name):
    """Generate a doc structure describing one ansible CLI program's options.

    :arg cli_class_name: name of the CLI class, e.g. 'AdHocCLI'.
    :arg cli_module_name: name of the module under ansible.cli, e.g. 'adhoc'.
    :returns: dict with usage, descriptions, shared options, option groups and
        a nested 'actions' mapping for each subcommand.
    """

    cli_name = 'ansible-%s' % cli_module_name
    if cli_module_name == 'adhoc':
        # The adhoc cli is installed as the bare `ansible` command.
        cli_name = 'ansible'

    # With no action/subcommand
    # shared opts set
    # instantiate each cli and ask its options
    cli_klass = getattr(__import__("ansible.cli.%s" % cli_module_name,
                                   fromlist=[cli_class_name]), cli_class_name)
    cli = cli_klass([cli_name])

    # parse the common options
    try:
        cli.init_parser()
    except Exception:
        pass

    # base/common cli info
    docs = {
        'cli': cli_module_name,
        'cli_name': cli_name,
        'usage': cli.parser.format_usage(),
        'short_desc': cli.parser.description,
        'long_desc': trim_docstring(cli.__doc__),
        'actions': {},
        'content_depth': 2,
    }
    option_info = {'option_names': [],
                   'options': [],
                   'groups': []}

    # NOTE: this must be a one-tuple -- iterating over the bare string
    # ('ARGUMENTS') would loop over its individual characters and never find
    # the ARGUMENTS attribute.
    for extras in ('ARGUMENTS',):
        if hasattr(cli, extras):
            docs[extras.lower()] = getattr(cli, extras)

    common_opts = opt_doc_list(cli.parser)
    groups_info = get_option_groups(cli.parser)
    shared_opt_names = []
    for opt in common_opts:
        shared_opt_names.extend(opt.get('options', []))

    option_info['options'] = common_opts
    option_info['option_names'] = shared_opt_names

    option_info['groups'].extend(groups_info)

    docs.update(option_info)

    # now for each action/subcommand
    # force populate parser with per action options

    def get_actions(parser, docs):
        """Recursively describe each subcommand; return the nesting depth."""
        try:
            subparser = parser._subparsers._group_actions[0].choices
        except AttributeError:
            # This parser has no subcommands.
            subparser = {}

        depth = 0

        for action, parser in subparser.items():
            action_info = {'option_names': [],
                           'options': [],
                           'actions': {}}
            action_info['name'] = action
            action_info['desc'] = trim_docstring(getattr(cli, 'execute_%s' % action).__doc__)

            action_doc_list = opt_doc_list(parser)

            # Keep only the options that are not part of the shared/common set.
            uncommon_options = []
            for action_doc in action_doc_list:
                option_aliases = action_doc.get('options', [])
                for option_alias in option_aliases:

                    if option_alias in shared_opt_names:
                        continue

                    # TODO: use set
                    if option_alias not in action_info['option_names']:
                        action_info['option_names'].append(option_alias)

                    if action_doc in action_info['options']:
                        continue

                    uncommon_options.append(action_doc)

            action_info['options'] = uncommon_options

            depth = 1 + get_actions(parser, action_info)

            docs['actions'][action] = action_info

        return depth

    action_depth = get_actions(cli.parser, docs)
    docs['content_depth'] = action_depth + 1

    docs['options'] = opt_doc_list(cli.parser)
    return docs
+
+
class GenerateMan(Command):
    """Subcommand that renders man pages (or rst) from the ansible CLI classes."""

    name = 'generate-man'

    @classmethod
    def init_parser(cls, add_parser):
        parser = add_parser(name=cls.name,
                            description='Generate cli documentation from cli docstrings')

        parser.add_argument("-t", "--template-file", action="store", dest="template_file",
                            default=DEFAULT_TEMPLATE_FILE, help="path to jinja2 template")
        parser.add_argument("-o", "--output-dir", action="store", dest="output_dir",
                            default='/tmp/', help="Output directory for rst files")
        parser.add_argument("-f", "--output-format", action="store", dest="output_format",
                            default='man',
                            help="Output format for docs (the default 'man' or 'rst')")
        parser.add_argument('cli_modules', help='CLI module name(s)', metavar='MODULE_NAME', nargs='*')

    @staticmethod
    def main(args):
        template_file = args.template_file
        template_path = os.path.expanduser(template_file)
        template_dir = os.path.abspath(os.path.dirname(template_path))
        template_basename = os.path.basename(template_file)

        output_dir = os.path.abspath(args.output_dir)
        output_format = args.output_format

        cli_modules = args.cli_modules

        # various cli parsing things checks sys.argv if the 'args' that are passed in are []
        # so just remove any args so the cli modules dont try to parse them resulting in warnings
        sys.argv = [sys.argv[0]]

        allvars = {}
        output = {}
        cli_bin_name_list = []

        for cli_module_name in cli_modules:
            binary = os.path.basename(os.path.expanduser(cli_module_name))

            # Only real cli module files get documented; skip package markers.
            if not binary.endswith('.py'):
                continue
            elif binary == '__init__.py':
                continue

            cli_name = os.path.splitext(binary)[0]

            if cli_name == 'adhoc':
                # The adhoc module is installed as the bare `ansible` command.
                cli_class_name = 'AdHocCLI'
                output[cli_name] = 'ansible.1.rst.in'
                cli_bin_name = 'ansible'
            else:
                cli_class_name = "%sCLI" % cli_name.capitalize()
                output[cli_name] = 'ansible-%s.1.rst.in' % cli_name
                cli_bin_name = 'ansible-%s' % cli_name

            allvars[cli_name] = opts_docs(cli_class_name, cli_name)
            cli_bin_name_list.append(cli_bin_name)

        cli_list = allvars.keys()

        doc_name_formats = {'man': '%s.1.rst.in',
                            'rst': '%s.rst'}

        # The template environment is identical for every cli; build it once
        # instead of once per loop iteration.
        env = Environment(loader=FileSystemLoader(template_dir))
        template = env.get_template(template_basename)

        for cli_name in cli_list:
            # add the shared info to this cli's template vars
            tvars = allvars[cli_name]
            tvars['cli_list'] = cli_list
            tvars['cli_bin_name_list'] = cli_bin_name_list
            tvars['cli'] = cli_name

            # Each entry in tvars['options'] is a dict whose 'options' key
            # holds the option strings; the old check (`'-i' in
            # tvars['options']`) compared against the dicts and never matched.
            if any('-i' in opt.get('options', []) for opt in tvars['options']):
                print('uses inventory')

            manpage = template.render(tvars)
            filename = os.path.join(output_dir, doc_name_formats[output_format] % tvars['cli_name'])
            update_file_if_different(filename, to_bytes(manpage))

        # Return 0 for consistency with the other Command subclasses.
        return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/porting_guide.py b/hacking/build_library/build_ansible/command_plugins/porting_guide.py
new file mode 100644
index 0000000..431485b
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/porting_guide.py
@@ -0,0 +1,138 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+from jinja2 import Environment, DictLoader
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+
+
+PORTING_GUIDE_TEMPLATE = """
+.. _porting_{{ ver }}_guide_core:
+
+*******************************
+Ansible-core {{ ver }} Porting Guide
+*******************************
+
+This section discusses the behavioral changes between ``ansible-core`` {{ prev_ver }} and ``ansible-core`` {{ ver }}.
+
+It is intended to assist in updating your playbooks, plugins and other parts of your Ansible infrastructure so they will work with this version of Ansible.
+
+We suggest you read this page along with `ansible-core Changelog for {{ ver }} <https://github.com/ansible/ansible/blob/stable-{{ ver }}/changelogs/CHANGELOG-v{{ ver }}.rst>`_ to understand what updates you may need to make.
+
+This document is part of a collection on porting. The complete list of porting guides can be found at :ref:`porting guides <porting_guides>`.
+
+.. contents:: Topics
+
+
+Playbook
+========
+
+No notable changes
+
+
+Command Line
+============
+
+No notable changes
+
+
+Deprecated
+==========
+
+No notable changes
+
+
+Modules
+=======
+
+No notable changes
+
+
+Modules removed
+---------------
+
+The following modules no longer exist:
+
+* No notable changes
+
+
+Deprecation notices
+-------------------
+
+No notable changes
+
+
+Noteworthy module changes
+-------------------------
+
+No notable changes
+
+
+Plugins
+=======
+
+No notable changes
+
+
+Porting custom scripts
+======================
+
+No notable changes
+
+
+Networking
+==========
+
+No notable changes
+
+""" # noqa for E501 (line length).
+# jinja2 is horrid about getting rid of extra newlines so we have to have a single line per
+# paragraph for proper wrapping to occur
+
# Single shared Jinja2 environment; trim_blocks/lstrip_blocks keep the
# rendered rst free of stray whitespace introduced by template tags.
JINJA_ENV = Environment(
    loader=DictLoader({'porting_guide': PORTING_GUIDE_TEMPLATE,
                       }),
    extensions=['jinja2.ext.i18n'],
    trim_blocks=True,
    lstrip_blocks=True,
)
+
+
def generate_porting_guide(version):
    """Render a fresh porting guide skeleton for the given ansible-core version."""
    template = JINJA_ENV.get_template('porting_guide')

    # Compute the previous release by decrementing the last version component.
    # NOTE(review): assumes the final component is never 0 (e.g. '2.0' would
    # yield '2.-1') -- confirm before using across a major version boundary.
    components = version.split('.')
    components[-1] = str(int(components[-1]) - 1)
    previous_version = '.'.join(components)

    return template.render(ver=version, prev_ver=previous_version)
+
+
def write_guide(version, guide_content):
    """Write the rendered porting guide into the docsite source tree."""
    destination = 'docs/docsite/rst/porting_guides/porting_guide_core_{0}.rst'.format(version)
    with open(destination, 'w') as handle:
        handle.write(guide_content)
+
+
class PortingGuideCommand(Command):
    """Subcommand that creates a new, empty porting guide document."""

    name = 'porting-guide'

    @classmethod
    def init_parser(cls, add_parser):
        parser = add_parser(cls.name, description="Generate a fresh porting guide template")
        parser.add_argument("--version", dest="version", type=str, required=True, action='store',
                            help="Version of Ansible to write the porting guide for")

    @staticmethod
    def main(args):
        # Render the skeleton and drop it into the docsite tree.
        write_guide(args.version, generate_porting_guide(args.version))
        return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/release_announcement.py b/hacking/build_library/build_ansible/command_plugins/release_announcement.py
new file mode 100644
index 0000000..edc928a
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/release_announcement.py
@@ -0,0 +1,78 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+import sys
+from collections import UserString
+from ansible.module_utils.compat.version import LooseVersion
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+from .. import errors # pylint: disable=relative-beyond-top-level
+
+
class VersionStr(UserString):
    """A string that also carries a parsed version object for sorting in templates."""
    def __init__(self, string):
        super().__init__(string.strip())
        # Keep the parsed form alongside the text so templates can sort on it.
        self.ver_obj = LooseVersion(string)
+
+
def transform_args(args):
    """Replace each entry in args.versions with a sortable VersionStr."""
    # Make it possible to sort versions in the jinja2 templates
    args.versions = [VersionStr(version) for version in args.versions]
    return args
+
+
def write_message(filename, message):
    """Write message to the named file, or to stdout when filename is '-'."""
    if filename == '-':
        # A little leading whitespace separates the message from prior output.
        sys.stdout.write('\n\n')
        sys.stdout.write(message)
    else:
        with open(filename, 'w') as out_file:
            out_file.write(message)
+
+
class ReleaseAnnouncementCommand(Command):
    """Subcommand that renders release announcement email and tweet text."""

    name = 'release-announcement'

    @classmethod
    def init_parser(cls, add_parser):
        parser = add_parser(cls.name,
                            description="Generate email and twitter announcements from template")

        parser.add_argument("--version", dest="versions", type=str, required=True, action='append',
                            help="Versions of Ansible to announce")
        parser.add_argument("--name", type=str, required=True, help="Real name to use on emails")
        parser.add_argument("--email-out", type=str, default="-",
                            help="Filename to place the email announcement into")
        parser.add_argument("--twitter-out", type=str, default="-",
                            help="Filename to place the twitter announcement into")

    @classmethod
    def main(cls, args):
        if sys.version_info < (3, 6):
            raise errors.DependencyError('The {0} subcommand needs Python-3.6+'
                                         ' to run'.format(cls.name))

        # Import here because these functions are invalid on Python-3.5 and the command plugins and
        # init_parser() method need to be compatible with Python-3.4+ for now.
        # Pylint doesn't understand Python3 namespace modules.
        from .. announce import create_short_message, create_long_message  # pylint: disable=relative-beyond-top-level

        args = transform_args(args)

        # Render both messages before writing anything so a rendering failure
        # leaves no partial output behind.
        twitter_message = create_short_message(args.versions)
        email_message = create_long_message(args.versions, args.name)

        write_message(args.twitter_out, twitter_message)
        write_message(args.email_out, email_message)
        return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/update_intersphinx.py b/hacking/build_library/build_ansible/command_plugins/update_intersphinx.py
new file mode 100644
index 0000000..9337859
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/update_intersphinx.py
@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+# (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+import argparse
+import importlib
+import os
+import pathlib
+import time
+import urllib.parse
+
+from collections import defaultdict
+
+from ansible.module_utils.common.collections import is_iterable
+from ansible.module_utils.urls import Request
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+from .. import errors # pylint: disable=relative-beyond-top-level
+
+
+EXAMPLE_CONF = """
+A proper intersphinx_mapping entry should look like:
+ intersphinx_mapping = {
+ 'python3': ('https://docs.python.org/3', (None, 'python3.inv'))
+ }
+
+See the intersphinx docs for more info:
+ https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#confval-intersphinx_mapping
+"""
+
+
class UpdateIntersphinxCache(Command):
    """Subcommand that refreshes the cached intersphinx objects.inv files."""

    name = 'update-intersphinx-cache'

    @classmethod
    def init_parser(cls, add_parser):
        parser = add_parser(cls.name, description='Update cached intersphinx mappings. This'
                            ' updates the cached intersphinx mappings for docs to reference'
                            ' documentation from other projects.')
        parser.add_argument('-o', '--output-dir', action='store',
                            help='Path to directory the cached objects.inv files are stored in')
        parser.add_argument('-c', '--conf-file', action='store',
                            help='Path to a sphinx config file to retrieve intersphinx config from')

    @staticmethod
    def main(args):
        # Retrieve the intersphinx information from the sphinx config file
        conf_dir = pathlib.Path(args.conf_file).parent

        # Execute the sphinx conf.py as a throwaway module so we can read its
        # intersphinx_mapping without pulling in the whole docs build.
        conf_module_spec = importlib.util.spec_from_file_location('sphinxconf', args.conf_file)
        conf_module = importlib.util.module_from_spec(conf_module_spec)
        conf_module_spec.loader.exec_module(conf_module)
        intersphinx_mapping = conf_module.intersphinx_mapping

        for intersphinx_name, inventory in intersphinx_mapping.items():
            if not is_iterable(inventory) or len(inventory) != 2:
                print('WARNING: The intersphinx entry for {0} must be'
                      ' a two-tuple.\n{1}'.format(intersphinx_name, EXAMPLE_CONF))
                continue

            # The two-tuple holds a url string and a (None, filename) fallback;
            # they may appear in either order, so pick each out by type.
            url = cache_file = None
            for inv_source in inventory:
                if isinstance(inv_source, str) and url is None:
                    url = inv_source
                elif is_iterable(inv_source) and cache_file is None:
                    if len(inv_source) != 2:
                        print('WARNING: The fallback entry for {0} should be a tuple of (None,'
                              ' filename).\n{1}'.format(intersphinx_name, EXAMPLE_CONF))
                        continue
                    cache_file = inv_source[1]
                else:
                    print('WARNING: The configuration for {0} should be a tuple of one url and one'
                          ' tuple for a fallback filename.\n{1}'.format(intersphinx_name,
                                                                        EXAMPLE_CONF))
                    continue

            if url is None or cache_file is None:
                print('WARNING: Could not figure out the url or fallback'
                      ' filename for {0}.\n{1}'.format(intersphinx_name, EXAMPLE_CONF))
                continue

            # NOTE(review): urljoin replaces the last path segment when the
            # configured url has no trailing slash (e.g. '.../3' becomes
            # '.../objects.inv'); this assumes mapping urls end with '/' --
            # confirm against the docsite conf.py.
            url = urllib.parse.urljoin(url, 'objects.inv')
            # Resolve any relative cache files to be relative to the conf file
            cache_file = conf_dir / cache_file

            # Retrieve the inventory and cache it
            # The jinja CDN seems to be blocking the default urllib User-Agent
            requestor = Request(headers={'User-Agent': 'Definitely Not Python ;-)'})
            with requestor.open('GET', url) as source_file:
                with open(cache_file, 'wb') as f:
                    f.write(source_file.read())

        print('Download of new cache files complete. Remember to git commit -a the changes')

        return 0
diff --git a/hacking/build_library/build_ansible/commands.py b/hacking/build_library/build_ansible/commands.py
new file mode 100644
index 0000000..8267993
--- /dev/null
+++ b/hacking/build_library/build_ansible/commands.py
@@ -0,0 +1,50 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+from abc import ABCMeta, abstractmethod, abstractproperty
+
+
class Command(metaclass=ABCMeta):
    """
    Subcommands of :program:`build-ansible.py`.

    This defines an interface that all subcommands must conform to. :program:`build-ansible.py`
    will require that these things are present in order to proceed.
    """
    @staticmethod
    @abstractmethod
    def name():
        """Name of the subcommand. It's the string used to invoke it on the command line.

        Subclasses normally satisfy this by assigning a plain class attribute
        (``name = 'my-command'``) rather than defining a method.
        """
        # abc.abstractproperty has been deprecated since Python 3.3; a plain
        # abstractmethod enforces the same must-be-overridden contract here
        # without stacking a property on top of a staticmethod.

    @staticmethod
    @abstractmethod
    def init_parser(add_parser):
        """
        Initialize and register an argparse ArgumentParser

        :arg add_parser: function which creates an ArgumentParser for the main program.

        Implementations should first create an ArgumentParser using `add_parser` and then populate
        it with the command line arguments that are needed.

        .. seealso:
            `add_parser` information in the :py:meth:`ArgumentParser.add_subparsers` documentation.
        """

    @staticmethod
    @abstractmethod
    def main(arguments):
        """
        Run the command

        :arg arguments: The **parsed** command line args

        This is the Command's entrypoint. The command line args are already parsed but from here
        on, the command can do its work.
        """
diff --git a/hacking/build_library/build_ansible/errors.py b/hacking/build_library/build_ansible/errors.py
new file mode 100644
index 0000000..a53d1fb
--- /dev/null
+++ b/hacking/build_library/build_ansible/errors.py
@@ -0,0 +1,19 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
class DependencyError(Exception):
    """A dependency was unmet"""


class MissingUserInput(Exception):
    """The user failed to provide input (via cli arg or interactively)"""


class InvalidUserInput(Exception):
    """The user provided invalid input"""
diff --git a/hacking/test-module.py b/hacking/test-module.py
new file mode 100755
index 0000000..54343e0
--- /dev/null
+++ b/hacking/test-module.py
@@ -0,0 +1,292 @@
+#!/usr/bin/env python
+
+# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+
+# this script is for testing modules without running through the
+# entire guts of ansible, and is very helpful for when developing
+# modules
+#
+# example:
+# ./hacking/test-module.py -m lib/ansible/modules/command.py -a "/bin/sleep 3"
+# ./hacking/test-module.py -m lib/ansible/modules/command.py -a "/bin/sleep 3" --debugger /usr/bin/pdb
+# ./hacking/test-module.py -m lib/ansible/modules/lineinfile.py -a "dest=/etc/exports line='/srv/home hostname1(rw,sync)'" --check
+# ./hacking/test-module.py -m lib/ansible/modules/command.py -a "echo hello" -n -o "test_hello"
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import glob
+import optparse
+import os
+import subprocess
+import sys
+import traceback
+import shutil
+
+from ansible.release import __version__
+import ansible.utils.vars as utils_vars
+from ansible.parsing.dataloader import DataLoader
+from ansible.parsing.utils.jsonify import jsonify
+from ansible.parsing.splitter import parse_kv
+from ansible.executor import module_common
+import ansible.constants as C
+from ansible.module_utils._text import to_native, to_text
+from ansible.template import Templar
+
+import json
+
+
def parse():
    """Parse the command line.

    :returns: tuple of (options, leftover positional args).  Exits after
        printing the help text when the required module path is missing.
    """
    opt_parser = optparse.OptionParser()
    opt_parser.usage = "%prog -[options] (-h for help)"

    # default to the interpreter running this script when one is known
    default_interpreter = "ansible_python_interpreter=%s" % (sys.executable if sys.executable else '/usr/bin/python')

    opt_parser.add_option('-m', '--module-path', dest='module_path',
                          help="REQUIRED: full path of module source to execute")
    opt_parser.add_option('-a', '--args', dest='module_args', default="",
                          help="module argument string")
    opt_parser.add_option('-D', '--debugger', dest='debugger',
                          help="path to python debugger (e.g. /usr/bin/pdb)")
    opt_parser.add_option('-I', '--interpreter', dest='interpreter',
                          help="path to interpreter to use for this module"
                               " (e.g. ansible_python_interpreter=/usr/bin/python)",
                          metavar='INTERPRETER_TYPE=INTERPRETER_PATH',
                          default=default_interpreter)
    opt_parser.add_option('-c', '--check', dest='check', action='store_true',
                          help="run the module in check mode")
    opt_parser.add_option('-n', '--noexecute', dest='execute', action='store_false',
                          default=True, help="do not run the resulting module")
    opt_parser.add_option('-o', '--output', dest='filename',
                          help="Filename for resulting module",
                          default="~/.ansible_module_generated")

    options, args = opt_parser.parse_args()

    # the module path is mandatory; bail out with usage help when absent
    if options.module_path:
        return options, args

    opt_parser.print_help()
    sys.exit(1)
+
+
def write_argsfile(argstring, json=False):
    """Write module args to a dotfile in the user's home dir for old-style module use.

    :arg argstring: raw ``key=value`` argument string supplied on the command line
    :kwarg json: when True, parse *argstring* as key=value pairs and serialize them
        to JSON before writing (for WANT_JSON/binary modules).  NOTE: this parameter
        name shadows the stdlib ``json`` module inside this function; it is kept
        because existing callers pass it by keyword.
    :returns: path of the file that was written
    """
    argspath = os.path.expanduser("~/.ansible_test_module_arguments")
    if json:
        args = parse_kv(argstring)
        argstring = jsonify(args)
    # context manager guarantees the file is flushed and closed even if the write fails
    with open(argspath, 'w') as argsfile:
        argsfile.write(argstring)
    return argspath
+
+
def get_interpreters(interpreter):
    """Parse an interpreter spec into a dict of task vars.

    :arg interpreter: string of the form ``[ansible_]TYPE[_interpreter]=PATH``,
        or a false-y value meaning no interpreter override.
    :returns: dict mapping the normalized ``ansible_<type>_interpreter`` key to
        the interpreter path; an empty dict when no interpreter was given.

    Exits the program with an error message when the spec contains no ``=``.
    """
    result = dict()
    if interpreter:
        if '=' not in interpreter:
            print("interpreter must be in the form of ansible_python_interpreter=/usr/bin/python")
            sys.exit(1)
        # split only on the first '=' so interpreter paths containing '=' survive intact
        interpreter_type, interpreter_path = interpreter.split('=', 1)
        # normalize bare type names like "python" to "ansible_python_interpreter"
        if not interpreter_type.startswith('ansible_'):
            interpreter_type = 'ansible_%s' % interpreter_type
        if not interpreter_type.endswith('_interpreter'):
            interpreter_type = '%s_interpreter' % interpreter_type
        result[interpreter_type] = interpreter_path
    return result
+
+
def boilerplate_module(modfile, args, interpreters, check, destfile):
    """Wrap a module source the way Ansible would before executing it.

    :arg modfile: path to the module source file
    :arg args: module argument string; a leading ``@`` loads args from a
        YAML/JSON file, a leading ``{`` is parsed as an inline YAML document,
        anything else is treated as ``key=value`` pairs
    :arg interpreters: dict of ``ansible_*_interpreter`` task vars
    :arg check: when True, inject ``_ansible_check_mode`` so the module runs in check mode
    :arg destfile: where to write the generated (wrapped) module source
    :returns: tuple of (path to the generated module, module name, module style)
    """
    loader = DataLoader()

    complex_args = {}

    # internal args Ansible normally injects for every module invocation;
    # the default selinux fs list is passed in via _ansible_selinux_special_fs
    complex_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS
    complex_args['_ansible_tmpdir'] = C.DEFAULT_LOCAL_TMP
    complex_args['_ansible_keep_remote_files'] = C.DEFAULT_KEEP_REMOTE_FILES
    complex_args['_ansible_version'] = __version__

    if args.startswith("@"):
        # Argument is a YAML file (JSON is a subset of YAML)
        complex_args = utils_vars.combine_vars(complex_args, loader.load_from_file(args[1:]))
        args = ''
    elif args.startswith("{"):
        # Argument is a YAML document (not a file)
        complex_args = utils_vars.combine_vars(complex_args, loader.load(args))
        args = ''

    if args:
        # remaining plain-string args are key=value pairs
        parsed_args = parse_kv(args)
        complex_args = utils_vars.combine_vars(complex_args, parsed_args)

    task_vars = interpreters

    if check:
        complex_args['_ansible_check_mode'] = True

    modname = os.path.basename(modfile)
    modname = os.path.splitext(modname)[0]
    (module_data, module_style, shebang) = module_common.modify_module(
        modname,
        modfile,
        complex_args,
        Templar(loader=loader),
        task_vars=task_vars
    )

    # AnsiballZ-wrapped payloads identify themselves with this marker in the generated source
    if module_style == 'new' and '_ANSIBALLZ_WRAPPER = True' in to_native(module_data):
        module_style = 'ansiballz'

    modfile2_path = os.path.expanduser(destfile)
    print("* including generated source, if any, saving to: %s" % modfile2_path)
    if module_style not in ('ansiballz', 'old'):
        print("* this may offset any line numbers in tracebacks/debuggers!")
    modfile2 = open(modfile2_path, 'wb')
    modfile2.write(module_data)
    modfile2.close()
    modfile = modfile2_path

    return (modfile2_path, modname, module_style)
+
+
def ansiballz_setup(modfile, modname, interpreters):
    """Explode an AnsiballZ wrapper and locate the embedded module source.

    :arg modfile: path to the generated AnsiballZ wrapper script
    :arg modname: name of the module to find in the exploded tree
    :arg interpreters: dict of ``ansible_*_interpreter`` task vars; an
        ``ansible_python_interpreter`` entry, when present, is used to run the wrapper
    :returns: tuple of (path to the exploded module source, path to its args file)

    Exits the program when the wrapper does not report where it exploded.
    """
    # use an argument list instead of os.system() so a modfile path containing
    # spaces or shell metacharacters cannot break (or inject into) the command
    subprocess.call(['chmod', '+x', modfile])

    if 'ansible_python_interpreter' in interpreters:
        command = [interpreters['ansible_python_interpreter']]
    else:
        command = []
    command.extend([modfile, 'explode'])

    cmd = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = cmd.communicate()
    out, err = to_text(out, errors='surrogate_or_strict'), to_text(err)
    lines = out.splitlines()
    # 'explode' is expected to print exactly two lines: a banner and the directory
    if len(lines) != 2 or 'Module expanded into' not in lines[0]:
        print("*" * 35)
        print("INVALID OUTPUT FROM ANSIBALLZ MODULE WRAPPER")
        print(out)
        sys.exit(err)
    debug_dir = lines[1].strip()

    # All the directories in an AnsiBallZ that modules can live
    core_dirs = glob.glob(os.path.join(debug_dir, 'ansible/modules'))
    collection_dirs = glob.glob(os.path.join(debug_dir, 'ansible_collections/*/*/plugins/modules'))

    # There's only one module in an AnsiBallZ payload so look for the first module and then exit
    # NOTE(review): the bare 'break' only exits the innermost loop, so the walk
    # continues over remaining directories; harmless with a single module present
    for module_dir in core_dirs + collection_dirs:
        for dirname, directories, filenames in os.walk(module_dir):
            for filename in filenames:
                if filename == modname + '.py':
                    modfile = os.path.join(dirname, filename)
                    break

    argsfile = os.path.join(debug_dir, 'args')

    print("* ansiballz module detected; extracted module source to: %s" % debug_dir)
    return modfile, argsfile
+
+
def runtest(modfile, argspath, modname, module_style, interpreters):
    """Test run a module, piping its output for reporting.

    :arg modfile: path to the generated module
    :arg argspath: path to the args file for old-style modules, or None
    :arg modname: module name (used to locate the source in an AnsiballZ payload)
    :arg module_style: one of 'ansiballz', 'new', 'old', 'non_native_want_json', 'binary'
    :arg interpreters: dict of ``ansible_*_interpreter`` task vars

    Prints the module's raw and JSON-parsed output; exits non-zero when the
    output is not valid JSON.
    """
    invoke = ""
    if module_style == 'ansiballz':
        modfile, argspath = ansiballz_setup(modfile, modname, interpreters)
        if 'ansible_python_interpreter' in interpreters:
            invoke = "%s " % interpreters['ansible_python_interpreter']

    # use an argument list instead of os.system() so a modfile path containing
    # spaces or shell metacharacters cannot break (or inject into) the command
    subprocess.call(['chmod', '+x', modfile])

    invoke = "%s%s" % (invoke, modfile)
    if argspath is not None:
        invoke = "%s %s" % (invoke, argspath)

    cmd = subprocess.Popen(invoke, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (out, err) = cmd.communicate()
    out, err = to_text(out), to_text(err)

    try:
        print("*" * 35)
        print("RAW OUTPUT")
        print(out)
        print(err)
        results = json.loads(out)
    except Exception:
        print("*" * 35)
        print("INVALID OUTPUT FORMAT")
        print(out)
        traceback.print_exc()
        sys.exit(1)

    print("*" * 35)
    print("PARSED OUTPUT")
    print(jsonify(results, format=True))
+
+
def rundebug(debugger, modfile, argspath, modname, module_style, interpreters):
    """Launch the generated module under an interactive console debugger."""
    if module_style == 'ansiballz':
        modfile, argspath = ansiballz_setup(modfile, modname, interpreters)

    # build "<debugger> <modfile> [<argspath>]" and hand it to the shell
    pieces = [debugger, modfile]
    if argspath is not None:
        pieces.append(argspath)
    subprocess.call(" ".join(pieces), shell=True)
+
+
def main():
    """Command line entry point: wrap the module like Ansible would, then run or debug it."""
    options, args = parse()
    interpreters = get_interpreters(options.interpreter)
    (modfile, modname, module_style) = boilerplate_module(options.module_path, options.module_args, interpreters, options.check, options.filename)

    argspath = None
    if module_style not in ('new', 'ansiballz'):
        # older module styles read their args from a file instead of embedded args
        if module_style in ('non_native_want_json', 'binary'):
            argspath = write_argsfile(options.module_args, json=True)
        elif module_style == 'old':
            argspath = write_argsfile(options.module_args, json=False)
        else:
            raise Exception("internal error, unexpected module style: %s" % module_style)

    if options.execute:
        if options.debugger:
            rundebug(options.debugger, modfile, argspath, modname, module_style, interpreters)
        else:
            runtest(modfile, argspath, modname, module_style, interpreters)
+
+
+if __name__ == "__main__":
+ try:
+ main()
+ finally:
+ shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)
diff --git a/hacking/update-sanity-requirements.py b/hacking/update-sanity-requirements.py
new file mode 100755
index 0000000..747f058
--- /dev/null
+++ b/hacking/update-sanity-requirements.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+# PYTHON_ARGCOMPLETE_OK
+"""Generate frozen sanity test requirements from source requirements files."""
+
+from __future__ import annotations
+
+import argparse
+import dataclasses
+import pathlib
+import subprocess
+import tempfile
+import typing as t
+import venv
+
+try:
+ import argcomplete
+except ImportError:
+ argcomplete = None
+
+
+FILE = pathlib.Path(__file__).resolve()
+ROOT = FILE.parent.parent
+SELF = FILE.relative_to(ROOT)
+
+
@dataclasses.dataclass(frozen=True)
class SanityTest:
    """A sanity test with a source (.in) requirements file and its frozen (.txt) counterpart."""
    name: str
    requirements_path: pathlib.Path
    source_path: pathlib.Path

    def freeze_requirements(self) -> None:
        """Regenerate the frozen requirements file from the source requirements."""
        with tempfile.TemporaryDirectory() as scratch_dir:
            venv.create(scratch_dir, with_pip=True)

            venv_python = pathlib.Path(scratch_dir, 'bin', 'python')
            base_command = [venv_python, '-m', 'pip', '--disable-pip-version-check']
            clean_env = dict()

            def freeze() -> str:
                # capture the venv's currently installed package set
                return subprocess.run(base_command + ['freeze'], env=clean_env, check=True, capture_output=True, text=True).stdout

            initial_packages = freeze()
            if initial_packages:
                raise Exception(f'Initial virtual environment is not empty:\n{initial_packages}')

            subprocess.run(base_command + ['install', 'wheel'], env=clean_env, check=True)  # make bdist_wheel available during pip install
            subprocess.run(base_command + ['install', '-r', self.source_path], env=clean_env, check=True)

            frozen = freeze()
            requirements = f'# edit "{self.source_path.name}" and generate with: {SELF} --test {self.name}\n{frozen}'

            with open(self.requirements_path, 'w') as requirement_file:
                requirement_file.write(requirements)

    @staticmethod
    def create(path: pathlib.Path) -> SanityTest:
        """Build a SanityTest from the path of a frozen requirements file."""
        test_name = path.stem.replace('sanity.', '').replace('.requirements', '')
        return SanityTest(
            name=test_name,
            requirements_path=path,
            source_path=path.with_suffix('.in'),
        )
+
+
def main() -> None:
    """Parse the command line and refreeze the selected (or all) test requirements."""
    all_tests = find_tests()

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--test',
        metavar='TEST',
        dest='test_names',
        action='append',
        choices=[item.name for item in all_tests],
        help='test requirements to update'
    )

    if argcomplete:
        argcomplete.autocomplete(parser)

    parsed = parser.parse_args()
    requested: set[str] = set(parsed.test_names or [])

    # no --test arguments means update everything
    selected = [item for item in all_tests if not requested or item.name in requested]

    for item in selected:
        print(f'===[ {item.name} ]===')
        item.freeze_requirements()
+
+
def find_tests() -> t.List[SanityTest]:
    """Collect every sanity test requirements file in the repo, sorted by test name."""
    patterns = (
        'test/lib/ansible_test/_data/requirements/sanity.*.txt',
        'test/sanity/code-smell/*.requirements.txt',
    )

    collected = [
        test
        for pattern in patterns
        for test in get_tests(pathlib.Path(pattern))
    ]

    return sorted(collected, key=lambda test: test.name)
+
+
def get_tests(glob: pathlib.Path) -> t.List[SanityTest]:
    """Return a SanityTest for every file under ROOT matching *glob*."""
    search_dir = pathlib.Path(ROOT, glob.parent)
    return [SanityTest.create(match) for match in search_dir.glob(glob.name)]
+
+
# script entry point
if __name__ == '__main__':
    main()