diff options
Diffstat (limited to 'hacking')
16 files changed, 1766 insertions, 0 deletions
diff --git a/hacking/build-ansible.py b/hacking/build-ansible.py new file mode 100755 index 00000000..8ebb88d3 --- /dev/null +++ b/hacking/build-ansible.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python3 +# coding: utf-8 +# PYTHON_ARGCOMPLETE_OK +# Copyright: (c) 2019, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +import argparse +import os.path +import sys + +from straight.plugin import load + +try: + import argcomplete +except ImportError: + argcomplete = None + + +def build_lib_path(this_script=__file__): + """Return path to the common build library directory.""" + hacking_dir = os.path.dirname(this_script) + libdir = os.path.abspath(os.path.join(hacking_dir, 'build_library')) + + return libdir + + +def ansible_lib_path(this_script=__file__): + """Return path to the common build library directory.""" + hacking_dir = os.path.dirname(this_script) + libdir = os.path.abspath(os.path.join(hacking_dir, '..', 'lib')) + + return libdir + + +sys.path.insert(0, ansible_lib_path()) +sys.path.insert(0, build_lib_path()) + + +from build_ansible import commands, errors + + +def create_arg_parser(program_name): + """ + Creates a command line argument parser + + :arg program_name: The name of the script. Used in help texts + """ + parser = argparse.ArgumentParser(prog=program_name, + description="Implements utilities to build Ansible") + return parser + + +def main(): + """ + Start our run. 
+ + "It all starts here" + """ + subcommands = load('build_ansible.command_plugins', subclasses=commands.Command) + + arg_parser = create_arg_parser(os.path.basename(sys.argv[0])) + arg_parser.add_argument('--debug', dest='debug', required=False, default=False, + action='store_true', + help='Show tracebacks and other debugging information') + subparsers = arg_parser.add_subparsers(title='Subcommands', dest='command', + help='for help use build-ansible.py SUBCOMMANDS -h') + subcommands.pipe('init_parser', subparsers.add_parser) + + if argcomplete: + argcomplete.autocomplete(arg_parser) + + args = arg_parser.parse_args(sys.argv[1:]) + if args.command is None: + print('Please specify a subcommand to run') + sys.exit(1) + + for subcommand in subcommands: + if subcommand.name == args.command: + command = subcommand + break + else: + # Note: We should never trigger this because argparse should shield us from it + print('Error: {0} was not a recognized subcommand'.format(args.command)) + sys.exit(1) + + try: + retval = command.main(args) + except (errors.DependencyError, errors.MissingUserInput, errors.InvalidUserInput) as e: + print(e) + if args.debug: + raise + sys.exit(2) + + sys.exit(retval) + + +if __name__ == '__main__': + main() diff --git a/hacking/build_library/__init__.py b/hacking/build_library/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/hacking/build_library/__init__.py diff --git a/hacking/build_library/build_ansible/__init__.py b/hacking/build_library/build_ansible/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/hacking/build_library/build_ansible/__init__.py diff --git a/hacking/build_library/build_ansible/announce.py b/hacking/build_library/build_ansible/announce.py new file mode 100644 index 00000000..c245bfb9 --- /dev/null +++ b/hacking/build_library/build_ansible/announce.py @@ -0,0 +1,293 @@ +# coding: utf-8 +# Copyright: (c) 2019, Ansible Project +# GNU General Public License v3.0+ (see 
COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +import asyncio +import datetime +import hashlib + +import aiohttp +from jinja2 import Environment, DictLoader + + +VERSION_FRAGMENT = """ +{%- if versions | length > 1 %} + {% for version in versions %} + {% if loop.last %}and {{ pretty_version(version) }}{% else %} + {% if versions | length == 2 %}{{ pretty_version(version) }} {% else %}{{ pretty_version(version) }}, {% endif -%} + {% endif -%} + {% endfor -%} +{%- else %}{{ pretty_version(versions[0]) }}{% endif -%} +""" + +LONG_TEMPLATE = """ +{% set plural = False if versions | length == 1 else True %} +{% set latest_ver = (versions | sort(attribute='ver_obj'))[-1] %} + +To: ansible-releases@redhat.com, ansible-devel@googlegroups.com, ansible-project@googlegroups.com, ansible-announce@googlegroups.com +Subject: New release{% if plural %}s{% endif %}: {{ version_str }} + +{% filter wordwrap %} +Hi all- we're happy to announce that the general release of {{ version_str }}{% if plural %} are{%- else %} is{%- endif %} now available! 
+{% endfilter %} + + + +How to get it +------------- + +{% for version in versions %} +$ pip install ansible{% if is_ansible_base(version) %}-base{% endif %}=={{ version }} --user +{% if not loop.last %} +or +{% endif %} +{% endfor %} + +The tar.gz of the release{% if plural %}s{% endif %} can be found here: + +{% for version in versions %} +* {{ pretty_version(version) }} +{% if is_ansible_base(version) %} + https://pypi.python.org/packages/source/a/ansible-base/ansible-base-{{ version }}.tar.gz +{% else %} + https://pypi.python.org/packages/source/a/ansible/ansible-{{ version }}.tar.gz +{% endif %} + SHA256: {{ hashes[version] }} +{% endfor %} + + +What's new in {{ version_str }} +{{ '-' * (14 + version_str | length) }} + +{% filter wordwrap %} +{% if plural %}These releases are{% else %}This release is a{% endif %} maintenance release{% if plural %}s{% endif %} containing numerous bugfixes. The full {% if plural %} changelogs are{% else %} changelog is{% endif %} at: +{% endfilter %} + + +{% for version in versions %} +* {{ version }} + https://github.com/ansible/ansible/blob/stable-{{ version.split('.')[:2] | join('.') }}/changelogs/CHANGELOG-v{{ version.split('.')[:2] | join('.') }}.rst +{% endfor %} + + +What's the schedule for future maintenance releases? +---------------------------------------------------- + +{% filter wordwrap %} +Future maintenance releases will occur approximately every 3 weeks. So expect the next one around {{ next_release.strftime('%Y-%m-%d') }}. +{% endfilter %} + + + +Porting Help +------------ + +{% filter wordwrap %} +We've published a porting guide at +https://docs.ansible.com/ansible/devel/porting_guides/porting_guide_{{ latest_ver.split('.')[:2] | join('.') }}.html to help migrate your content to {{ latest_ver.split('.')[:2] | join('.') }}. 
+{% endfilter %} + + + +{% filter wordwrap %} +If you discover any errors or if any of your working playbooks break when you upgrade to {{ latest_ver }}, please use the following link to report the regression: +{% endfilter %} + + + https://github.com/ansible/ansible/issues/new/choose + +{% filter wordwrap %} +In your issue, be sure to mention the version that works and the one that doesn't. +{% endfilter %} + + +Thanks! + +-{{ name }} + +""" # noqa for E501 (line length). +# jinja2 is horrid about getting rid of extra newlines so we have to have a single per paragraph for +# proper wrapping to occur + +SHORT_TEMPLATE = """ +{% set plural = False if versions | length == 1 else True %} +{% set version = (versions|sort(attribute='ver_obj'))[-1] %} +@ansible +{{ version_str }} +{% if plural %} + have +{% else %} + has +{% endif %} +been released! Get +{% if plural %} +them +{% else %} +it +{% endif %} +on PyPI: pip install ansible{% if is_ansible_base(version) %}-base{% endif %}=={{ version }}, +the Ansible PPA on Launchpad, or GitHub. Happy automating! +""" # noqa for E501 (line length). 
# jinja2 is horrid about getting rid of extra newlines so we have to have a single per paragraph for
# proper wrapping to occur

JINJA_ENV = Environment(
    loader=DictLoader({'long': LONG_TEMPLATE,
                       'short': SHORT_TEMPLATE,
                       'version_string': VERSION_FRAGMENT,
                       }),
    extensions=['jinja2.ext.i18n'],
    trim_blocks=True,
    lstrip_blocks=True,
)


async def calculate_hash_from_tarball(session, version):
    """Download the release tarball from PyPI and compute its SHA256 digest.

    :arg session: aiohttp client session to download with
    :arg version: ansible-base version string, e.g. ``2.10.0``
    :returns: hex digest of the tarball's SHA256 hash
    """
    tar_url = f'https://pypi.python.org/packages/source/a/ansible-base/ansible-base-{version}.tar.gz'
    tar_task = asyncio.create_task(session.get(tar_url))
    tar_response = await tar_task

    tar_hash = hashlib.sha256()
    # Hash incrementally so the whole tarball is never held in memory
    while True:
        chunk = await tar_response.content.read(1024)
        if not chunk:
            break
        tar_hash.update(chunk)

    return tar_hash.hexdigest()


async def parse_hash_from_file(session, version):
    """Fetch the pre-created ``.sha`` file for a release and extract its hash.

    :arg session: aiohttp client session to download with
    :arg version: ansible-base version string
    :returns: the hash string recorded in the ``.sha`` file
    :raises ValueError: if the ``.sha`` file names a different tarball
    """
    filename = f'ansible-base-{version}.tar.gz'
    # BUGFIX: the URL previously contained the literal text '(unknown)' in
    # place of the tarball filename; the .sha file lives alongside the tarball
    # and is named after it (see the filename check below).
    hash_url = f'https://releases.ansible.com/ansible-base/{filename}.sha'
    hash_task = asyncio.create_task(session.get(hash_url))
    hash_response = await hash_task

    hash_content = await hash_response.read()
    # .sha files have the format '<hash>  <filename>'
    precreated_hash, precreated_filename = hash_content.split(None, 1)
    if filename != precreated_filename.strip().decode('utf-8'):
        raise ValueError(f'Hash file contains hash for a different file: {precreated_filename}')

    return precreated_hash.decode('utf-8')


async def get_hash(session, version):
    """Return the SHA256 of a release tarball, cross-checked against the published .sha file.

    :raises ValueError: if the locally computed hash and the published hash disagree
    """
    calculated_hash = await calculate_hash_from_tarball(session, version)
    precreated_hash = await parse_hash_from_file(session, version)

    if calculated_hash != precreated_hash:
        raise ValueError(f'Hash in file ansible-base-{version}.tar.gz.sha {precreated_hash} does not'
                         f' match hash of tarball from pypi {calculated_hash}')

    return calculated_hash


async def get_hashes(versions):
    """Concurrently retrieve the verified SHA256 digest for every version.

    :arg versions: iterable of ansible-base version strings
    :returns: dict mapping each version to its sha256 hex digest
    """
    hashes = {}
    requestors = {}
    async with aiohttp.ClientSession() as aio_session:
        for version in versions:
            requestors[version] = asyncio.create_task(get_hash(aio_session, version))

        for version, request in requestors.items():
            await request
            hashes[version] = request.result()

    return hashes
def next_release_date(weeks=3, today=None):
    """Calculate the date of the next maintenance release.

    Releases happen on Thursdays, so the reference date is first snapped to a
    Thursday (backwards if it is Friday, forwards otherwise) and then the
    release interval is added.

    :kwarg weeks: Number of weeks between maintenance releases
    :kwarg today: Reference datetime to compute from.  Defaults to the current
        time; accepting it as a parameter keeps this function deterministic
        and testable (backward-compatible addition).
    :returns: datetime of the next release
    """
    days_in_the_future = weeks * 7
    if today is None:
        today = datetime.datetime.now()
    numeric_today = today.weekday()

    # We release on Thursdays (weekday() == 3)
    if numeric_today == 3:
        # Already a Thursday
        pass
    elif numeric_today == 4:
        # If this is Friday, we can adjust back to Thursday for the next release
        today -= datetime.timedelta(days=1)
    elif numeric_today < 3:
        # Earlier in the week: slide forward to this week's Thursday
        today += datetime.timedelta(days=(3 - numeric_today))
    else:
        # Past Thursday this week (Sat/Sun): go forward to Thursday of next week
        today += datetime.timedelta(days=(10 - numeric_today))

    next_release = today + datetime.timedelta(days=days_in_the_future)
    return next_release


def is_ansible_base(version):
    '''
    Determines if a version is an ansible-base version or not, by checking
    if it is >= 2.10.0.  Stops comparing when it gets to the first non-numeric
    component to allow for .dev and .beta suffixes.
    '''
    # Ignore .beta/.dev suffixes; 'rc'/'b' markers are stripped down to their
    # leading numeric part (assumes markers look like '0rc1' / '0b1')
    ver_split = []
    for component in version.split('.'):
        if not component.isdigit():
            if 'rc' in component:
                ver_split.append(int(component.split('rc')[0]))
            if 'b' in component:
                ver_split.append(int(component.split('b')[0]))
            continue
        ver_split.append(int(component))
    return tuple(ver_split) >= (2, 10, 0)
# Currently only use with a single element list, but left general for later
# in case we need to refer to the releases collectively.
def release_variants(versions):
    """Name the release stream(s) that *versions* belong to."""
    base_flags = [is_ansible_base(candidate) for candidate in versions]
    if all(base_flags):
        return 'ansible-base'
    if not any(base_flags):
        return 'Ansible'
    return 'Ansible and ansible-base'


def pretty_version(version):
    """Return a human readable '<product> <version>' string for one version."""
    return '{0} {1}'.format(
        release_variants([version]),
        version,
    )


def create_long_message(versions, name):
    """Render the long-form release announcement email body."""
    # Download and verify the release hashes before rendering anything
    hashes = asyncio.run(get_hashes(versions))

    version_str = JINJA_ENV.get_template('version_string').render(
        versions=versions, pretty_version=pretty_version).strip()

    next_release = next_release_date()

    return JINJA_ENV.get_template('long').render(
        versions=versions,
        version_str=version_str,
        name=name,
        hashes=hashes,
        next_release=next_release,
        is_ansible_base=is_ansible_base,
        pretty_version=pretty_version,
    )


def create_short_message(versions):
    """Render the short (social-media sized) release announcement."""
    version_str = JINJA_ENV.get_template('version_string').render(
        versions=versions, pretty_version=pretty_version).strip()

    rendered = JINJA_ENV.get_template('short').render(
        versions=versions,
        version_str=version_str,
        is_ansible_base=is_ansible_base,
        pretty_version=pretty_version,
    )
    # Collapse all whitespace runs to single spaces and terminate with newline
    return ' '.join(rendered.split()) + '\n'
def update_file_if_different(filename, b_data):
    """
    Replace file content only if content is different.

    This preserves timestamps in case the file content has not changed.  It performs multiple
    operations on the file so it is not atomic and may be slower than simply writing to the file.

    :arg filename: The filename to write to
    :arg b_data: Byte string containing the data to write to the file
    :returns: True if the file was (re)written, False if it already held b_data
    """
    try:
        with open(filename, 'rb') as f:
            b_data_old = f.read()
    except FileNotFoundError:
        # File did not exist, set b_data_old to a sentinel value so that
        # b_data gets written to the filename.
        # (FileNotFoundError is the OSError subclass for errno ENOENT, which
        # replaces the previous fragile `IOError` + `e.errno != 2` check.)
        b_data_old = None

    if b_data_old != b_data:
        with open(filename, 'wb') as f:
            f.write(b_data)
        return True

    return False
# Pylint doesn't understand Python3 namespace modules.
from ..change_detection import update_file_if_different  # pylint: disable=relative-beyond-top-level
from ..commands import Command  # pylint: disable=relative-beyond-top-level


DEFAULT_TEMPLATE_FILE = 'collections_galaxy_meta.rst.j2'
DEFAULT_TEMPLATE_DIR = pathlib.Path(__file__).parents[4] / 'docs/templates'


def normalize_options(options):
    """Normalize the options to make for easy templating"""
    for option in options:
        description = option['description']
        # The template expects a list of paragraphs, so wrap bare strings
        if isinstance(description, string_types):
            option['description'] = [description]
to_bytes(template.render(temp_vars)) + update_file_if_different(output_name, data) + + return 0 diff --git a/hacking/build_library/build_ansible/command_plugins/docs_build.py b/hacking/build_library/build_ansible/command_plugins/docs_build.py new file mode 100644 index 00000000..e38ef201 --- /dev/null +++ b/hacking/build_library/build_ansible/command_plugins/docs_build.py @@ -0,0 +1,226 @@ +# coding: utf-8 +# Copyright: (c) 2020, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# Make coding more python3-ish +from __future__ import absolute_import, division, print_function + +import glob +import os +import os.path +import pathlib +import shutil +from tempfile import TemporaryDirectory + +import yaml + +from ansible.release import __version__ as ansible_base__version__ + +# Pylint doesn't understand Python3 namespace modules. +# pylint: disable=relative-beyond-top-level +from ..commands import Command +from ..errors import InvalidUserInput, MissingUserInput +# pylint: enable=relative-beyond-top-level + + +__metaclass__ = type + + +DEFAULT_TOP_DIR = pathlib.Path(__file__).parents[4] +DEFAULT_OUTPUT_DIR = pathlib.Path(__file__).parents[4] / 'docs/docsite' + + +class NoSuchFile(Exception): + """An expected file was not found.""" + + +# +# Helpers +# + +def find_latest_ansible_dir(build_data_working): + """Find the most recent ansible major version.""" + # imports here so that they don't cause unnecessary deps for all of the plugins + from packaging.version import InvalidVersion, Version + + ansible_directories = glob.glob(os.path.join(build_data_working, '[0-9.]*')) + + # Find the latest ansible version directory + latest = None + latest_ver = Version('0') + for directory_name in (d for d in ansible_directories if os.path.isdir(d)): + try: + new_version = Version(os.path.basename(directory_name)) + except InvalidVersion: + continue + + if new_version > latest_ver: + latest_ver = new_version + latest = 
def find_latest_deps_file(build_data_working, ansible_version):
    """Find the most recent ansible deps file for the given ansible major version."""
    # imports here so that they don't cause unnecessary deps for all of the plugins
    from packaging.version import Version

    data_dir = os.path.join(build_data_working, ansible_version)
    deps_files = glob.glob(os.path.join(data_dir, '*.deps'))
    if not deps_files:
        raise Exception('No deps files exist for version {0}'.format(ansible_version))

    # Pick the deps file whose recorded _ansible_version is highest
    newest_file = None
    newest_version = Version('0')
    for candidate in deps_files:
        with open(candidate, 'r') as f:
            deps_metadata = yaml.safe_load(f.read())
        candidate_version = Version(deps_metadata['_ansible_version'])
        if candidate_version > newest_version:
            newest_version = candidate_version
            newest_file = candidate

    if newest_file is None:
        raise NoSuchFile('Could not find an ansible deps file in {0}'.format(data_dir))

    return newest_file
def generate_full_docs(args):
    """Regenerate the documentation for all plugins listed in the plugin_to_collection_file."""
    # imports here so that they don't cause unnecessary deps for all of the plugins
    import sh
    from antsibull.cli import antsibull_docs

    with TemporaryDirectory() as scratch_dir:
        # Fetch the community build data that records which collections make
        # up each ansible release
        sh.git(['clone', 'https://github.com/ansible-community/ansible-build-data'], _cwd=scratch_dir)
        # If we want to validate that the ansible version and ansible-base branch version match,
        # this would be the place to do it.

        build_data_working = os.path.join(scratch_dir, 'ansible-build-data')

        ansible_version = args.ansible_version
        if ansible_version is None:
            ansible_version = find_latest_ansible_dir(build_data_working)

        newest_deps_file = find_latest_deps_file(build_data_working, ansible_version)

        # Make a copy of the deps file so that we can set the ansible-base version we'll use
        patched_deps_file = os.path.join(scratch_dir, 'ansible.deps')
        shutil.copyfile(newest_deps_file, patched_deps_file)

        # Put our version of ansible-base into the deps file
        with open(patched_deps_file, 'r') as f:
            deps_info = yaml.safe_load(f.read())

        deps_info['_ansible_base_version'] = ansible_base__version__

        with open(patched_deps_file, 'w') as f:
            f.write(yaml.dump(deps_info))

        # Generate the plugin rst
        return antsibull_docs.run(['antsibull-docs', 'stable', '--deps-file', patched_deps_file,
                                   '--ansible-base-source', str(args.top_dir),
                                   '--dest-dir', args.output_dir])

    # If we make this more than just a driver for antsibull:
    #   Run other rst generation
    #   Run sphinx build
+ base: Regenerate the rst for plugins in ansible-base and then build the website. + named: Regenerate the rst for the named plugins and then build the website. + """ + + @classmethod + def init_parser(cls, add_parser): + parser = add_parser(cls.name, + description='Generate documentation for plugins in collections.' + ' Plugins in collections will have a stub file in the normal plugin' + ' documentation location that says the module is in a collection and' + ' point to generated plugin documentation under the collections/' + ' hierarchy.') + # I think we should make the actions a subparser but need to look in git history and see if + # we tried that and changed it for some reason. + parser.add_argument('action', action='store', choices=('full', 'base', 'named'), + default='full', help=cls._ACTION_HELP) + parser.add_argument("-o", "--output-dir", action="store", dest="output_dir", + default=DEFAULT_OUTPUT_DIR, + help="Output directory for generated doc files") + parser.add_argument("-t", "--top-dir", action="store", dest="top_dir", + default=DEFAULT_TOP_DIR, + help="Toplevel directory of this ansible-base checkout or expanded" + " tarball.") + parser.add_argument("-l", "--limit-to-modules", '--limit-to', action="store", + dest="limit_to", default=None, + help="Limit building module documentation to comma-separated list of" + " plugins. Specify non-existing plugin name for no plugins.") + parser.add_argument('--ansible-version', action='store', + dest='ansible_version', default=None, + help='The version of the ansible package to make documentation for.' 
+ ' This only makes sense when used with full.') + + @staticmethod + def main(args): + # normalize and validate CLI args + + if args.ansible_version and args.action != 'full': + raise InvalidUserInput('--ansible-version is only for use with "full".') + + if not args.output_dir: + args.output_dir = os.path.abspath(str(DEFAULT_OUTPUT_DIR)) + + if args.action == 'full': + return generate_full_docs(args) + + if args.action == 'base': + return generate_base_docs(args) + # args.action == 'named' (Invalid actions are caught by argparse) + raise NotImplementedError('Building docs for specific files is not yet implemented') + + # return 0 diff --git a/hacking/build_library/build_ansible/command_plugins/dump_config.py b/hacking/build_library/build_ansible/command_plugins/dump_config.py new file mode 100644 index 00000000..7811f465 --- /dev/null +++ b/hacking/build_library/build_ansible/command_plugins/dump_config.py @@ -0,0 +1,76 @@ +# coding: utf-8 +# Copyright: (c) 2019, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import os.path +import pathlib + +import yaml +from jinja2 import Environment, FileSystemLoader +from ansible.module_utils._text import to_bytes + +# Pylint doesn't understand Python3 namespace modules. +from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level +from ..commands import Command # pylint: disable=relative-beyond-top-level + + +DEFAULT_TEMPLATE_FILE = 'config.rst.j2' +DEFAULT_TEMPLATE_DIR = pathlib.Path(__file__).parents[4] / 'docs/templates' + + +def fix_description(config_options): + '''some descriptions are strings, some are lists. 
def fix_description(config_options):
    '''some descriptions are strings, some are lists.  workaround it...'''

    for config_key, config_value in config_options.items():
        # Normalize every description to a list of paragraphs; a missing
        # description becomes an empty list.
        description = config_value.get('description', [])
        if not isinstance(description, list):
            description = [description]
        config_value['description'] = description
    return config_options
DEFAULT_TEMPLATE_DIR = str(pathlib.Path(__file__).resolve().parents[4] / 'docs/templates')
TEMPLATE_FILE = 'playbooks_keywords.rst.j2'
PLAYBOOK_CLASS_NAMES = ['Play', 'Role', 'Block', 'Task']


def load_definitions(keyword_definitions_file):
    """Load keyword descriptions from a YAML file.

    :arg keyword_definitions_file: path to the YAML file of keyword docs
    :returns: dict mapping keyword names to documentation strings.  An empty
        or all-comment YAML file yields an empty dict (yaml.safe_load returns
        None for such files), so callers can safely do membership tests.
    """
    with open(keyword_definitions_file) as f:
        docs = yaml.safe_load(f)

    # Guard against empty files: safe_load returns None for them
    return docs if docs is not None else {}
playbook_class._valid_attrs.items() + # Filter private attributes as they're not usable in playbooks + if not v.private} + + # pick up definitions if they exist + for keyword in tuple(pb_keywords[pb_class_name]): + if keyword in keyword_definitions: + pb_keywords[pb_class_name][keyword] = keyword_definitions[keyword] + else: + # check if there is an alias, otherwise undocumented + alias = getattr(getattr(playbook_class, '_%s' % keyword), 'alias', None) + if alias and alias in keyword_definitions: + pb_keywords[pb_class_name][alias] = keyword_definitions[alias] + del pb_keywords[pb_class_name][keyword] + else: + pb_keywords[pb_class_name][keyword] = ' UNDOCUMENTED!! ' + + # loop is really with_ for users + if pb_class_name == 'Task': + pb_keywords[pb_class_name]['with_<lookup_plugin>'] = ( + 'The same as ``loop`` but magically adds the output of any lookup plugin to' + ' generate the item list.') + + # local_action is implicit with action + if 'action' in pb_keywords[pb_class_name]: + pb_keywords[pb_class_name]['local_action'] = ('Same as action but also implies' + ' ``delegate_to: localhost``') + + return pb_keywords + + +def generate_page(pb_keywords, template_dir): + env = Environment(loader=FileSystemLoader(template_dir), trim_blocks=True,) + template = env.get_template(TEMPLATE_FILE) + tempvars = {'pb_keywords': pb_keywords, 'playbook_class_names': PLAYBOOK_CLASS_NAMES} + + keyword_page = template.render(tempvars) + if LooseVersion(jinja2.__version__) < LooseVersion('2.10'): + # jinja2 < 2.10's indent filter indents blank lines. 
class DocumentKeywords(Command):
    """Subcommand that renders the playbook keywords rst page."""

    name = 'document-keywords'

    @classmethod
    def init_parser(cls, add_parser):
        """Register the document-keywords subcommand and its options."""
        parser = add_parser(cls.name, description='Generate playbook keyword documentation from'
                            ' code and descriptions')
        parser.add_argument("-T", "--template-dir", action="store", dest="template_dir",
                            default=DEFAULT_TEMPLATE_DIR,
                            help="directory containing Jinja2 templates")
        parser.add_argument("-o", "--output-dir", action="store", dest="output_dir",
                            default='/tmp/', help="Output directory for rst files")
        parser.add_argument("keyword_defs", metavar="KEYWORD-DEFINITIONS.yml", type=str,
                            help="Source for playbook keyword docs")

    @staticmethod
    def main(args):
        """Build the keyword page and write it only when its content changed."""
        definitions = load_definitions(args.keyword_defs)
        keywords = extract_keywords(definitions)

        page_text = generate_page(keywords, args.template_dir)
        output_path = os.path.join(args.output_dir, TEMPLATE_FILE.replace('.j2', ''))
        update_file_if_different(output_path, to_bytes(page_text))

        return 0
+from ..commands import Command # pylint: disable=relative-beyond-top-level +from .. import errors # pylint: disable=relative-beyond-top-level + +ANSIBLE_MAJOR_VERSION = '.'.join(ansible_version.split('.')[:2]) + + +def get_token(token_file): + if token_file: + return token_file.read().strip() + + token = os.getenv('GITHUB_TOKEN').strip() + if not token: + raise errors.MissingUserInput( + 'Please provide a file containing a github oauth token with public_repo scope' + ' via the --github-token argument or set the GITHUB_TOKEN env var with your' + ' github oauth token' + ) + return token + + +def parse_deprecations(problems_file_handle): + deprecated = defaultdict(list) + deprecation_errors = problems_file_handle.read() + for line in deprecation_errors.splitlines(): + path = line.split(':')[0] + if path.endswith('__init__.py'): + component = os.path.basename(os.path.dirname(path)) + else: + component, dummy = os.path.splitext(os.path.basename(path).lstrip('_')) + + title = ( + '%s contains deprecated call to be removed in %s' % + (component, ANSIBLE_MAJOR_VERSION) + ) + deprecated[component].append( + dict(title=title, path=path, line=line) + ) + return deprecated + + +def find_project_todo_column(repo, project_name): + project = None + for project in repo.projects(): + if project.name.lower() == project_name: + break + else: + raise errors.InvalidUserInput('%s was an invalid project name' % project_name) + + for project_column in project.columns(): + column_name = project_column.name.lower() + if 'todo' in column_name or 'backlog' in column_name or 'to do' in column_name: + return project_column + + raise Exception('Unable to determine the todo column in' + ' project %s' % project_name) + + +def create_issues(deprecated, body_tmpl, repo): + issues = [] + + for component, items in deprecated.items(): + title = items[0]['title'] + path = '\n'.join(set((i['path']) for i in items)) + line = '\n'.join(i['line'] for i in items) + body = body_tmpl % 
dict(component=component, path=path, + line=line, + version=ANSIBLE_MAJOR_VERSION) + + issue = repo.create_issue(title, body=body, labels=['deprecated']) + print(issue) + issues.append(issue) + + # Sleep a little, so that the API doesn't block us + time.sleep(0.5) + + return issues + + +class FileDeprecationTickets(Command): + name = 'file-deprecation-tickets' + + @classmethod + def init_parser(cls, add_parser): + parser = add_parser(cls.name, description='File tickets to cleanup deprecated features for' + ' the next release') + parser.add_argument('--template', default='deprecated_issue_template.md', + type=argparse.FileType('r'), + help='Path to markdown file template to be used for issue ' + 'body. Default: %(default)s') + parser.add_argument('--project-name', default='', type=str, + help='Name of a github project to assign all issues to') + parser.add_argument('--github-token', type=argparse.FileType('r'), + help='Path to file containing a github token with public_repo scope.' + ' This token in this file will be used to open the deprcation' + ' tickets and add them to the github project. 
If not given,' + ' the GITHUB_TOKEN environment variable will be tried') + parser.add_argument('problems', type=argparse.FileType('r'), + help='Path to file containing pylint output for the ' + 'ansible-deprecated-version check') + + @staticmethod + def main(args): + try: + from github3 import GitHub + except ImportError: + raise errors.DependencyError( + 'This command needs the github3.py library installed to work' + ) + + token = get_token(args.github_token) + args.github_token.close() + + deprecated = parse_deprecations(args.problems) + args.problems.close() + + body_tmpl = args.template.read() + args.template.close() + + project_name = args.project_name.strip().lower() + + gh_conn = GitHub(token=token) + repo = gh_conn.repository('abadger', 'ansible') + + if project_name: + project_column = find_project_todo_column(repo, project_name) + + issues = create_issues(deprecated, body_tmpl, repo) + + if project_column: + for issue in issues: + project_column.create_card_with_issue(issue) + time.sleep(0.5) + + return 0 diff --git a/hacking/build_library/build_ansible/command_plugins/generate_man.py b/hacking/build_library/build_ansible/command_plugins/generate_man.py new file mode 100644 index 00000000..3795c0d2 --- /dev/null +++ b/hacking/build_library/build_ansible/command_plugins/generate_man.py @@ -0,0 +1,303 @@ +# coding: utf-8 +# Copyright: (c) 2019, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +import argparse +import os.path +import pathlib +import sys + +from jinja2 import Environment, FileSystemLoader + +from ansible.module_utils._text import to_bytes + +# Pylint doesn't understand Python3 namespace modules. 
+from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level +from ..commands import Command # pylint: disable=relative-beyond-top-level + + +DEFAULT_TEMPLATE_FILE = pathlib.Path(__file__).parents[4] / 'docs/templates/man.j2' + + +# from https://www.python.org/dev/peps/pep-0257/ +def trim_docstring(docstring): + if not docstring: + return '' + # Convert tabs to spaces (following the normal Python rules) + # and split into a list of lines: + lines = docstring.expandtabs().splitlines() + # Determine minimum indentation (first line doesn't count): + indent = sys.maxsize + for line in lines[1:]: + stripped = line.lstrip() + if stripped: + indent = min(indent, len(line) - len(stripped)) + # Remove indentation (first line is special): + trimmed = [lines[0].strip()] + if indent < sys.maxsize: + for line in lines[1:]: + trimmed.append(line[indent:].rstrip()) + # Strip off trailing and leading blank lines: + while trimmed and not trimmed[-1]: + trimmed.pop() + while trimmed and not trimmed[0]: + trimmed.pop(0) + # Return a single string: + return '\n'.join(trimmed) + + +def get_options(optlist): + ''' get actual options ''' + + opts = [] + for opt in optlist: + res = { + 'desc': opt.help, + 'options': opt.option_strings + } + if isinstance(opt, argparse._StoreAction): + res['arg'] = opt.dest.upper() + elif not res['options']: + continue + opts.append(res) + + return opts + + +def dedupe_groups(parser): + action_groups = [] + for action_group in parser._action_groups: + found = False + for a in action_groups: + if a._actions == action_group._actions: + found = True + break + if not found: + action_groups.append(action_group) + return action_groups + + +def get_option_groups(option_parser): + groups = [] + for action_group in dedupe_groups(option_parser)[1:]: + group_info = {} + group_info['desc'] = action_group.description + group_info['options'] = action_group._actions + group_info['group_obj'] = action_group + 
groups.append(group_info) + return groups + + +def opt_doc_list(parser): + ''' iterate over options lists ''' + + results = [] + for option_group in dedupe_groups(parser)[1:]: + results.extend(get_options(option_group._actions)) + + results.extend(get_options(parser._actions)) + + return results + + +# def opts_docs(cli, name): +def opts_docs(cli_class_name, cli_module_name): + ''' generate doc structure from options ''' + + cli_name = 'ansible-%s' % cli_module_name + if cli_module_name == 'adhoc': + cli_name = 'ansible' + + # WIth no action/subcommand + # shared opts set + # instantiate each cli and ask its options + cli_klass = getattr(__import__("ansible.cli.%s" % cli_module_name, + fromlist=[cli_class_name]), cli_class_name) + cli = cli_klass([cli_name]) + + # parse the common options + try: + cli.init_parser() + except Exception: + pass + + # base/common cli info + docs = { + 'cli': cli_module_name, + 'cli_name': cli_name, + 'usage': cli.parser.format_usage(), + 'short_desc': cli.parser.description, + 'long_desc': trim_docstring(cli.__doc__), + 'actions': {}, + 'content_depth': 2, + } + option_info = {'option_names': [], + 'options': [], + 'groups': []} + + for extras in ('ARGUMENTS'): + if hasattr(cli, extras): + docs[extras.lower()] = getattr(cli, extras) + + common_opts = opt_doc_list(cli.parser) + groups_info = get_option_groups(cli.parser) + shared_opt_names = [] + for opt in common_opts: + shared_opt_names.extend(opt.get('options', [])) + + option_info['options'] = common_opts + option_info['option_names'] = shared_opt_names + + option_info['groups'].extend(groups_info) + + docs.update(option_info) + + # now for each action/subcommand + # force populate parser with per action options + + def get_actions(parser, docs): + # use class attrs not the attrs on a instance (not that it matters here...) 
+ try: + subparser = parser._subparsers._group_actions[0].choices + except AttributeError: + subparser = {} + + depth = 0 + + for action, parser in subparser.items(): + action_info = {'option_names': [], + 'options': [], + 'actions': {}} + # docs['actions'][action] = {} + # docs['actions'][action]['name'] = action + action_info['name'] = action + action_info['desc'] = trim_docstring(getattr(cli, 'execute_%s' % action).__doc__) + + # docs['actions'][action]['desc'] = getattr(cli, 'execute_%s' % action).__doc__.strip() + action_doc_list = opt_doc_list(parser) + + uncommon_options = [] + for action_doc in action_doc_list: + # uncommon_options = [] + + option_aliases = action_doc.get('options', []) + for option_alias in option_aliases: + + if option_alias in shared_opt_names: + continue + + # TODO: use set + if option_alias not in action_info['option_names']: + action_info['option_names'].append(option_alias) + + if action_doc in action_info['options']: + continue + + uncommon_options.append(action_doc) + + action_info['options'] = uncommon_options + + depth = 1 + get_actions(parser, action_info) + + docs['actions'][action] = action_info + + return depth + + action_depth = get_actions(cli.parser, docs) + docs['content_depth'] = action_depth + 1 + + docs['options'] = opt_doc_list(cli.parser) + return docs + + +class GenerateMan(Command): + name = 'generate-man' + + @classmethod + def init_parser(cls, add_parser): + parser = add_parser(name=cls.name, + description='Generate cli documentation from cli docstrings') + + parser.add_argument("-t", "--template-file", action="store", dest="template_file", + default=DEFAULT_TEMPLATE_FILE, help="path to jinja2 template") + parser.add_argument("-o", "--output-dir", action="store", dest="output_dir", + default='/tmp/', help="Output directory for rst files") + parser.add_argument("-f", "--output-format", action="store", dest="output_format", + default='man', + help="Output format for docs (the default 'man' or 'rst')") + 
parser.add_argument('cli_modules', help='CLI module name(s)', metavar='MODULE_NAME', nargs='*') + + @staticmethod + def main(args): + template_file = args.template_file + template_path = os.path.expanduser(template_file) + template_dir = os.path.abspath(os.path.dirname(template_path)) + template_basename = os.path.basename(template_file) + + output_dir = os.path.abspath(args.output_dir) + output_format = args.output_format + + cli_modules = args.cli_modules + + # various cli parsing things checks sys.argv if the 'args' that are passed in are [] + # so just remove any args so the cli modules dont try to parse them resulting in warnings + sys.argv = [sys.argv[0]] + + allvars = {} + output = {} + cli_list = [] + cli_bin_name_list = [] + + # for binary in os.listdir('../../lib/ansible/cli'): + for cli_module_name in cli_modules: + binary = os.path.basename(os.path.expanduser(cli_module_name)) + + if not binary.endswith('.py'): + continue + elif binary == '__init__.py': + continue + + cli_name = os.path.splitext(binary)[0] + + if cli_name == 'adhoc': + cli_class_name = 'AdHocCLI' + # myclass = 'AdHocCLI' + output[cli_name] = 'ansible.1.rst.in' + cli_bin_name = 'ansible' + else: + # myclass = "%sCLI" % libname.capitalize() + cli_class_name = "%sCLI" % cli_name.capitalize() + output[cli_name] = 'ansible-%s.1.rst.in' % cli_name + cli_bin_name = 'ansible-%s' % cli_name + + # FIXME: + allvars[cli_name] = opts_docs(cli_class_name, cli_name) + cli_bin_name_list.append(cli_bin_name) + + cli_list = allvars.keys() + + doc_name_formats = {'man': '%s.1.rst.in', + 'rst': '%s.rst'} + + for cli_name in cli_list: + + # template it! 
+ env = Environment(loader=FileSystemLoader(template_dir)) + template = env.get_template(template_basename) + + # add rest to vars + tvars = allvars[cli_name] + tvars['cli_list'] = cli_list + tvars['cli_bin_name_list'] = cli_bin_name_list + tvars['cli'] = cli_name + if '-i' in tvars['options']: + print('uses inventory') + + manpage = template.render(tvars) + filename = os.path.join(output_dir, doc_name_formats[output_format] % tvars['cli_name']) + update_file_if_different(filename, to_bytes(manpage)) diff --git a/hacking/build_library/build_ansible/command_plugins/porting_guide.py b/hacking/build_library/build_ansible/command_plugins/porting_guide.py new file mode 100644 index 00000000..40097a3f --- /dev/null +++ b/hacking/build_library/build_ansible/command_plugins/porting_guide.py @@ -0,0 +1,138 @@ +# coding: utf-8 +# Copyright: (c) 2019, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +from jinja2 import Environment, DictLoader + +# Pylint doesn't understand Python3 namespace modules. +from ..commands import Command # pylint: disable=relative-beyond-top-level + + +PORTING_GUIDE_TEMPLATE = """ +.. _porting_{{ ver }}_guide: + +************************** +Ansible {{ ver }} Porting Guide +************************** + +This section discusses the behavioral changes between Ansible {{ prev_ver }} and Ansible {{ ver }}. + +It is intended to assist in updating your playbooks, plugins and other parts of your Ansible infrastructure so they will work with this version of Ansible. + +We suggest you read this page along with `Ansible Changelog for {{ ver }} <https://github.com/ansible/ansible/blob/devel/changelogs/CHANGELOG-v{{ ver }}.rst>`_ to understand what updates you may need to make. + +This document is part of a collection on porting. 
The complete list of porting guides can be found at :ref:`porting guides <porting_guides>`. + +.. contents:: Topics + + +Playbook +======== + +No notable changes + + +Command Line +============ + +No notable changes + + +Deprecated +========== + +No notable changes + + +Modules +======= + +No notable changes + + +Modules removed +--------------- + +The following modules no longer exist: + +* No notable changes + + +Deprecation notices +------------------- + +No notable changes + + +Noteworthy module changes +------------------------- + +No notable changes + + +Plugins +======= + +No notable changes + + +Porting custom scripts +====================== + +No notable changes + + +Networking +========== + +No notable changes + +""" # noqa for E501 (line length). +# jinja2 is horrid about getting rid of extra newlines so we have to have a single line per +# paragraph for proper wrapping to occur + +JINJA_ENV = Environment( + loader=DictLoader({'porting_guide': PORTING_GUIDE_TEMPLATE, + }), + extensions=['jinja2.ext.i18n'], + trim_blocks=True, + lstrip_blocks=True, +) + + +def generate_porting_guide(version): + template = JINJA_ENV.get_template('porting_guide') + + version_list = version.split('.') + version_list[-1] = str(int(version_list[-1]) - 1) + previous_version = '.'.join(version_list) + + content = template.render(ver=version, prev_ver=previous_version) + return content + + +def write_guide(version, guide_content): + filename = 'porting_guide_{0}.rst'.format(version) + with open(filename, 'w') as out_file: + out_file.write(guide_content) + + +class PortingGuideCommand(Command): + name = 'porting-guide' + + @classmethod + def init_parser(cls, add_parser): + parser = add_parser(cls.name, description="Generate a fresh porting guide template") + parser.add_argument("--version", dest="version", type=str, required=True, action='store', + help="Version of Ansible to write the porting guide for") + + @staticmethod + def main(args): + guide_content = 
generate_porting_guide(args.version) + write_guide(args.version, guide_content) + return 0 diff --git a/hacking/build_library/build_ansible/command_plugins/release_announcement.py b/hacking/build_library/build_ansible/command_plugins/release_announcement.py new file mode 100644 index 00000000..620dda0d --- /dev/null +++ b/hacking/build_library/build_ansible/command_plugins/release_announcement.py @@ -0,0 +1,78 @@ +# coding: utf-8 +# Copyright: (c) 2019, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +import sys +from collections import UserString +from distutils.version import LooseVersion + +# Pylint doesn't understand Python3 namespace modules. +from ..commands import Command # pylint: disable=relative-beyond-top-level +from .. import errors # pylint: disable=relative-beyond-top-level + + +class VersionStr(UserString): + def __init__(self, string): + super().__init__(string.strip()) + self.ver_obj = LooseVersion(string) + + +def transform_args(args): + # Make it possible to sort versions in the jinja2 templates + new_versions = [] + for version in args.versions: + new_versions.append(VersionStr(version)) + args.versions = new_versions + + return args + + +def write_message(filename, message): + if filename != '-': + with open(filename, 'w') as out_file: + out_file.write(message) + else: + sys.stdout.write('\n\n') + sys.stdout.write(message) + + +class ReleaseAnnouncementCommand(Command): + name = 'release-announcement' + + @classmethod + def init_parser(cls, add_parser): + parser = add_parser(cls.name, + description="Generate email and twitter announcements from template") + + parser.add_argument("--version", dest="versions", type=str, required=True, action='append', + help="Versions of Ansible to announce") + parser.add_argument("--name", type=str, required=True, help="Real 
name to use on emails") + parser.add_argument("--email-out", type=str, default="-", + help="Filename to place the email announcement into") + parser.add_argument("--twitter-out", type=str, default="-", + help="Filename to place the twitter announcement into") + + @classmethod + def main(cls, args): + if sys.version_info < (3, 6): + raise errors.DependencyError('The {0} subcommand needs Python-3.6+' + ' to run'.format(cls.name)) + + # Import here because these functions are invalid on Python-3.5 and the command plugins and + # init_parser() method need to be compatible with Python-3.4+ for now. + # Pylint doesn't understand Python3 namespace modules. + from .. announce import create_short_message, create_long_message # pylint: disable=relative-beyond-top-level + + args = transform_args(args) + + twitter_message = create_short_message(args.versions) + email_message = create_long_message(args.versions, args.name) + + write_message(args.twitter_out, twitter_message) + write_message(args.email_out, email_message) + return 0 diff --git a/hacking/build_library/build_ansible/command_plugins/update_intersphinx.py b/hacking/build_library/build_ansible/command_plugins/update_intersphinx.py new file mode 100644 index 00000000..9337859f --- /dev/null +++ b/hacking/build_library/build_ansible/command_plugins/update_intersphinx.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +# (c) 2020, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +import argparse +import importlib +import os +import pathlib +import time +import urllib.parse + +from collections import defaultdict + +from ansible.module_utils.common.collections import is_iterable +from ansible.module_utils.urls import Request + +# Pylint doesn't understand Python3 namespace modules. 
+from ..commands import Command # pylint: disable=relative-beyond-top-level +from .. import errors # pylint: disable=relative-beyond-top-level + + +EXAMPLE_CONF = """ +A proper intersphinx_mapping entry should look like: + intersphinx_mapping = { + 'python3': ('https://docs.python.org/3', (None, 'python3.inv')) + } + +See the intersphinx docs for more info: + https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#confval-intersphinx_mapping +""" + + +class UpdateIntersphinxCache(Command): + name = 'update-intersphinx-cache' + + @classmethod + def init_parser(cls, add_parser): + parser = add_parser(cls.name, description='Update cached intersphinx mappings. This' + ' updates the cached intersphinx mappings for docs to reference' + ' documentation from other projects.') + parser.add_argument('-o', '--output-dir', action='store', + help='Path to directory the cached objects.inv files are stored in') + parser.add_argument('-c', '--conf-file', action='store', + help='Path to a sphinx config file to retrieve intersphinx config from') + + @staticmethod + def main(args): + # Retrieve the intersphinx information from the sphinx config file + conf_dir = pathlib.Path(args.conf_file).parent + + conf_module_spec = importlib.util.spec_from_file_location('sphinxconf', args.conf_file) + conf_module = importlib.util.module_from_spec(conf_module_spec) + conf_module_spec.loader.exec_module(conf_module) + intersphinx_mapping = conf_module.intersphinx_mapping + + for intersphinx_name, inventory in intersphinx_mapping.items(): + if not is_iterable(inventory) or len(inventory) != 2: + print('WARNING: The intersphinx entry for {0} must be' + ' a two-tuple.\n{1}'.format(intersphinx_name, EXAMPLE_CONF)) + continue + + url = cache_file = None + for inv_source in inventory: + if isinstance(inv_source, str) and url is None: + url = inv_source + elif is_iterable(inv_source) and cache_file is None: + if len(inv_source) != 2: + print('WARNING: The fallback entry for {0} should be a 
tuple of (None,' + ' filename).\n{1}'.format(intersphinx_name, EXAMPLE_CONF)) + continue + cache_file = inv_source[1] + else: + print('WARNING: The configuration for {0} should be a tuple of one url and one' + ' tuple for a fallback filename.\n{1}'.format(intersphinx_name, + EXAMPLE_CONF)) + continue + + if url is None or cache_file is None: + print('WARNING: Could not figure out the url or fallback' + ' filename for {0}.\n{1}'.format(intersphinx_name, EXAMPLE_CONF)) + continue + + url = urllib.parse.urljoin(url, 'objects.inv') + # Resolve any relative cache files to be relative to the conf file + cache_file = conf_dir / cache_file + + # Retrieve the inventory and cache it + # The jinja CDN seems to be blocking the default urllib User-Agent + requestor = Request(headers={'User-Agent': 'Definitely Not Python ;-)'}) + with requestor.open('GET', url) as source_file: + with open(cache_file, 'wb') as f: + f.write(source_file.read()) + + print('Download of new cache files complete. Remember to git commit -a the changes') + + return 0 diff --git a/hacking/build_library/build_ansible/commands.py b/hacking/build_library/build_ansible/commands.py new file mode 100644 index 00000000..82679934 --- /dev/null +++ b/hacking/build_library/build_ansible/commands.py @@ -0,0 +1,50 @@ +# coding: utf-8 +# Copyright: (c) 2019, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +from abc import ABCMeta, abstractmethod, abstractproperty + + +class Command(metaclass=ABCMeta): + """ + Subcommands of :program:`build-ansible.py`. + + This defines an interface that all subcommands must conform to. :program:`build-ansible.py` + will require that these things are present in order to proceed. + """ + @staticmethod + @abstractproperty + def name(): + """Name of the subcommand. 
It's the string used to invoke it on the command line""" + + @staticmethod + @abstractmethod + def init_parser(add_parser): + """ + Initialize and register an argparse ArgumentParser + + :arg add_parser: function which creates an ArgumentParser for the main program. + + Implementations should first create an ArgumentParser using `add_parser` and then populate + it with the command line arguments that are needed. + + .. seealso: + `add_parser` information in the :py:meth:`ArgumentParser.add_subparsers` documentation. + """ + + @staticmethod + @abstractmethod + def main(arguments): + """ + Run the command + + :arg arguments: The **parsed** command line args + + This is the Command's entrypoint. The command line args are already parsed but from here + on, the command can do its work. + """ diff --git a/hacking/build_library/build_ansible/errors.py b/hacking/build_library/build_ansible/errors.py new file mode 100644 index 00000000..a53d1fb1 --- /dev/null +++ b/hacking/build_library/build_ansible/errors.py @@ -0,0 +1,19 @@ +# coding: utf-8 +# Copyright: (c) 2019, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class DependencyError(Exception): + """A dependency was unmet""" + + +class MissingUserInput(Exception): + """The user failed to provide input (via cli arg or interactively)""" + + +class InvalidUserInput(Exception): + """The user provided invalid input""" |