summaryrefslogtreecommitdiffstats
path: root/hacking/build_library/build_ansible/command_plugins
diff options
context:
space:
mode:
Diffstat (limited to 'hacking/build_library/build_ansible/command_plugins')
-rw-r--r--hacking/build_library/build_ansible/command_plugins/collection_meta.py72
-rw-r--r--hacking/build_library/build_ansible/command_plugins/docs_build.py255
-rw-r--r--hacking/build_library/build_ansible/command_plugins/dump_config.py82
-rw-r--r--hacking/build_library/build_ansible/command_plugins/dump_keywords.py121
-rw-r--r--hacking/build_library/build_ansible/command_plugins/file_deprecated_issues.py153
-rw-r--r--hacking/build_library/build_ansible/command_plugins/generate_man.py303
-rw-r--r--hacking/build_library/build_ansible/command_plugins/porting_guide.py138
-rw-r--r--hacking/build_library/build_ansible/command_plugins/release_announcement.py78
-rw-r--r--hacking/build_library/build_ansible/command_plugins/update_intersphinx.py101
9 files changed, 1303 insertions, 0 deletions
diff --git a/hacking/build_library/build_ansible/command_plugins/collection_meta.py b/hacking/build_library/build_ansible/command_plugins/collection_meta.py
new file mode 100644
index 0000000..41b1077
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/collection_meta.py
@@ -0,0 +1,72 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import os.path
+import pathlib
+
+import yaml
+from ansible.module_utils.six import string_types
+from ansible.module_utils._text import to_bytes
+from antsibull_docs.jinja2.environment import doc_environment
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+
+
+DEFAULT_TEMPLATE_FILE = 'collections_galaxy_meta.rst.j2'
+DEFAULT_TEMPLATE_DIR = pathlib.Path(__file__).parents[4] / 'docs/templates'
+
+
+def normalize_options(options):
+ """Normalize the options to make for easy templating"""
+ for opt in options:
+ if isinstance(opt['description'], string_types):
+ opt['description'] = [opt['description']]
+
+
+class DocumentCollectionMeta(Command):
+ name = 'collection-meta'
+
+ @classmethod
+ def init_parser(cls, add_parser):
+ parser = add_parser(cls.name, description='Generate collection galaxy.yml documentation from shared metadata')
+ parser.add_argument("-t", "--template-file", action="store", dest="template_file",
+ default=DEFAULT_TEMPLATE_FILE,
+ help="Jinja2 template to use for the config")
+ parser.add_argument("-T", "--template-dir", action="store", dest="template_dir",
+ default=str(DEFAULT_TEMPLATE_DIR),
+ help="directory containing Jinja2 templates")
+ parser.add_argument("-o", "--output-dir", action="store", dest="output_dir", default='/tmp/',
+ help="Output directory for rst files")
+ parser.add_argument("collection_defs", metavar="COLLECTION-OPTION-DEFINITIONS.yml", type=str,
+ help="Source for collection metadata option docs")
+
+ @staticmethod
+ def main(args):
+ output_dir = os.path.abspath(args.output_dir)
+ template_file_full_path = os.path.abspath(os.path.join(args.template_dir, args.template_file))
+ template_file = os.path.basename(template_file_full_path)
+ template_dir = os.path.dirname(template_file_full_path)
+
+ with open(args.collection_defs) as f:
+ options = yaml.safe_load(f)
+
+ normalize_options(options)
+
+ env = doc_environment(template_dir)
+
+ template = env.get_template(template_file)
+ output_name = os.path.join(output_dir, template_file.replace('.j2', ''))
+ temp_vars = {'options': options}
+
+ data = to_bytes(template.render(temp_vars))
+ update_file_if_different(output_name, data)
+
+ return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/docs_build.py b/hacking/build_library/build_ansible/command_plugins/docs_build.py
new file mode 100644
index 0000000..50b0f90
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/docs_build.py
@@ -0,0 +1,255 @@
+# coding: utf-8
+# Copyright: (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import absolute_import, division, print_function
+
+import glob
+import os
+import os.path
+import pathlib
+import shutil
+from tempfile import TemporaryDirectory
+
+import yaml
+
+from ansible.release import __version__ as ansible_core__version__
+
+# Pylint doesn't understand Python3 namespace modules.
+# pylint: disable=relative-beyond-top-level
+from ..commands import Command
+from ..errors import InvalidUserInput, MissingUserInput
+# pylint: enable=relative-beyond-top-level
+
+
+__metaclass__ = type
+
+
+DEFAULT_TOP_DIR = pathlib.Path(__file__).parents[4]
+DEFAULT_OUTPUT_DIR = pathlib.Path(__file__).parents[4] / 'docs/docsite'
+
+
+class NoSuchFile(Exception):
+ """An expected file was not found."""
+
+
+#
+# Helpers
+#
+
+def find_latest_ansible_dir(build_data_working):
+ """Find the most recent ansible major version."""
+ # imports here so that they don't cause unnecessary deps for all of the plugins
+ from packaging.version import InvalidVersion, Version
+
+ ansible_directories = glob.glob(os.path.join(build_data_working, '[0-9.]*'))
+
+ # Find the latest ansible version directory
+ latest = None
+ latest_ver = Version('0')
+ for directory_name in (d for d in ansible_directories if os.path.isdir(d)):
+ try:
+ new_version = Version(os.path.basename(directory_name))
+ except InvalidVersion:
+ continue
+
+ # For the devel build, we only need ansible.in, so make sure it's there
+ if not os.path.exists(os.path.join(directory_name, 'ansible.in')):
+ continue
+
+ if new_version > latest_ver:
+ latest_ver = new_version
+ latest = directory_name
+
+ if latest is None:
+ raise NoSuchFile('Could not find an ansible data directory in {0}'.format(build_data_working))
+
+ return latest
+
+
+def parse_deps_file(filename):
+ """Parse an antsibull .deps file."""
+ with open(filename, 'r', encoding='utf-8') as f:
+ contents = f.read()
+ lines = [c for line in contents.splitlines() if (c := line.strip()) and not c.startswith('#')]
+ return dict([entry.strip() for entry in line.split(':', 1)] for line in lines)
+
+
+def write_deps_file(filename, deps_data):
+ """Write an antsibull .deps file."""
+ with open(filename, 'w', encoding='utf-8') as f:
+ for key, value in deps_data.items():
+ f.write(f'{key}: {value}\n')
+
+
+def find_latest_deps_file(build_data_working, ansible_version):
+ """Find the most recent ansible deps file for the given ansible major version."""
+ # imports here so that they don't cause unnecessary deps for all of the plugins
+ from packaging.version import Version
+
+ data_dir = os.path.join(build_data_working, ansible_version)
+ deps_files = glob.glob(os.path.join(data_dir, '*.deps'))
+ if not deps_files:
+ raise Exception('No deps files exist for version {0}'.format(ansible_version))
+
+ # Find the latest version of the deps file for this major version
+ latest = None
+ latest_ver = Version('0')
+ for filename in deps_files:
+ deps_data = parse_deps_file(filename)
+ new_version = Version(deps_data['_ansible_version'])
+ if new_version > latest_ver:
+ latest_ver = new_version
+ latest = filename
+
+ if latest is None:
+ raise NoSuchFile('Could not find an ansible deps file in {0}'.format(data_dir))
+
+ return latest
+
+
+#
+# Subcommand core
+#
+
+def generate_core_docs(args):
+ """Regenerate the documentation for all plugins listed in the plugin_to_collection_file."""
+ # imports here so that they don't cause unnecessary deps for all of the plugins
+ from antsibull_docs.cli import antsibull_docs
+
+ with TemporaryDirectory() as tmp_dir:
+ #
+ # Construct a deps file with our version of ansible_core in it
+ #
+ modified_deps_file = os.path.join(tmp_dir, 'ansible.deps')
+
+ # The _ansible_version doesn't matter since we're only building docs for core
+ deps_file_contents = {'_ansible_version': ansible_core__version__,
+ '_ansible_core_version': ansible_core__version__}
+
+ with open(modified_deps_file, 'w') as f:
+ f.write(yaml.dump(deps_file_contents))
+
+ # Generate the plugin rst
+ return antsibull_docs.run(['antsibull-docs', 'stable', '--deps-file', modified_deps_file,
+ '--ansible-core-source', str(args.top_dir),
+ '--dest-dir', args.output_dir])
+
+ # If we make this more than just a driver for antsibull:
+ # Run other rst generation
+ # Run sphinx build
+
+
+#
+# Subcommand full
+#
+
+def generate_full_docs(args):
+ """Regenerate the documentation for all plugins listed in the plugin_to_collection_file."""
+ # imports here so that they don't cause unnecessary deps for all of the plugins
+ import sh
+ from antsibull_docs.cli import antsibull_docs
+
+ with TemporaryDirectory() as tmp_dir:
+ sh.git(['clone', 'https://github.com/ansible-community/ansible-build-data'], _cwd=tmp_dir)
+ # If we want to validate that the ansible version and ansible-core branch version match,
+ # this would be the place to do it.
+
+ build_data_working = os.path.join(tmp_dir, 'ansible-build-data')
+ if args.ansible_build_data:
+ build_data_working = args.ansible_build_data
+
+ ansible_version = args.ansible_version
+ if ansible_version is None:
+ ansible_version = find_latest_ansible_dir(build_data_working)
+ params = ['devel', '--pieces-file', os.path.join(ansible_version, 'ansible.in')]
+ else:
+ latest_filename = find_latest_deps_file(build_data_working, ansible_version)
+
+ # Make a copy of the deps file so that we can set the ansible-core version we'll use
+ modified_deps_file = os.path.join(tmp_dir, 'ansible.deps')
+ shutil.copyfile(latest_filename, modified_deps_file)
+
+ # Put our version of ansible-core into the deps file
+ deps_data = parse_deps_file(modified_deps_file)
+
+ deps_data['_ansible_core_version'] = ansible_core__version__
+
+ # antsibull-docs will choke when a key `_python` is found. Remove it to work around
+ # that until antsibull-docs is fixed.
+ deps_data.pop('_python', None)
+
+ write_deps_file(modified_deps_file, deps_data)
+
+ params = ['stable', '--deps-file', modified_deps_file]
+
+ # Generate the plugin rst
+ return antsibull_docs.run(['antsibull-docs'] + params +
+ ['--ansible-core-source', str(args.top_dir),
+ '--dest-dir', args.output_dir])
+
+ # If we make this more than just a driver for antsibull:
+ # Run other rst generation
+ # Run sphinx build
+
+
+class CollectionPluginDocs(Command):
+ name = 'docs-build'
+ _ACTION_HELP = """Action to perform.
+ full: Regenerate the rst for the full ansible website.
+ core: Regenerate the rst for plugins in ansible-core and then build the website.
+ named: Regenerate the rst for the named plugins and then build the website.
+ """
+
+ @classmethod
+ def init_parser(cls, add_parser):
+ parser = add_parser(cls.name,
+ description='Generate documentation for plugins in collections.'
+ ' Plugins in collections will have a stub file in the normal plugin'
+ ' documentation location that says the module is in a collection and'
+ ' point to generated plugin documentation under the collections/'
+ ' hierarchy.')
+ # I think we should make the actions a subparser but need to look in git history and see if
+ # we tried that and changed it for some reason.
+ parser.add_argument('action', action='store', choices=('full', 'core', 'named'),
+ default='full', help=cls._ACTION_HELP)
+ parser.add_argument("-o", "--output-dir", action="store", dest="output_dir",
+ default=DEFAULT_OUTPUT_DIR,
+ help="Output directory for generated doc files")
+ parser.add_argument("-t", "--top-dir", action="store", dest="top_dir",
+ default=DEFAULT_TOP_DIR,
+ help="Toplevel directory of this ansible-core checkout or expanded"
+ " tarball.")
+ parser.add_argument("-l", "--limit-to-modules", '--limit-to', action="store",
+ dest="limit_to", default=None,
+ help="Limit building module documentation to comma-separated list of"
+ " plugins. Specify non-existing plugin name for no plugins.")
+ parser.add_argument('--ansible-version', action='store',
+ dest='ansible_version', default=None,
+ help='The version of the ansible package to make documentation for.'
+ ' This only makes sense when used with full.')
+ parser.add_argument('--ansible-build-data', action='store',
+ dest='ansible_build_data', default=None,
+ help='A checkout of the ansible-build-data repo. Useful for'
+ ' debugging.')
+
+ @staticmethod
+ def main(args):
+ # normalize and validate CLI args
+
+ if args.ansible_version and args.action != 'full':
+ raise InvalidUserInput('--ansible-version is only for use with "full".')
+
+ if not args.output_dir:
+ args.output_dir = os.path.abspath(str(DEFAULT_OUTPUT_DIR))
+
+ if args.action == 'full':
+ return generate_full_docs(args)
+
+ if args.action == 'core':
+ return generate_core_docs(args)
+ # args.action == 'named' (Invalid actions are caught by argparse)
+ raise NotImplementedError('Building docs for specific files is not yet implemented')
+
+ # return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/dump_config.py b/hacking/build_library/build_ansible/command_plugins/dump_config.py
new file mode 100644
index 0000000..33591b4
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/dump_config.py
@@ -0,0 +1,82 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import os.path
+import pathlib
+
+import yaml
+from jinja2 import Environment, FileSystemLoader
+from ansible.module_utils._text import to_bytes
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+
+
+DEFAULT_TEMPLATE_FILE = 'config.rst.j2'
+DEFAULT_TEMPLATE_DIR = pathlib.Path(__file__).parents[4] / 'docs/templates'
+
+
+def fix_description(config_options):
+ '''some descriptions are strings, some are lists. workaround it...'''
+
+ for config_key in list(config_options.keys()):
+
+ # drop internal entries
+ if config_key.startswith('_'):
+ del config_options[config_key]
+ continue
+
+ description = config_options[config_key].get('description', [])
+ if isinstance(description, list):
+ desc_list = description
+ else:
+ desc_list = [description]
+ config_options[config_key]['description'] = desc_list
+ return config_options
+
+
+class DocumentConfig(Command):
+ name = 'document-config'
+
+ @classmethod
+ def init_parser(cls, add_parser):
+ parser = add_parser(cls.name, description='Generate module documentation from metadata')
+ parser.add_argument("-t", "--template-file", action="store", dest="template_file",
+ default=DEFAULT_TEMPLATE_FILE,
+ help="Jinja2 template to use for the config")
+ parser.add_argument("-T", "--template-dir", action="store", dest="template_dir",
+ default=str(DEFAULT_TEMPLATE_DIR),
+ help="directory containing Jinja2 templates")
+ parser.add_argument("-o", "--output-dir", action="store", dest="output_dir", default='/tmp/',
+ help="Output directory for rst files")
+ parser.add_argument("config_defs", metavar="CONFIG-OPTION-DEFINITIONS.yml", type=str,
+ help="Source for config option docs")
+
+ @staticmethod
+ def main(args):
+ output_dir = os.path.abspath(args.output_dir)
+ template_file_full_path = os.path.abspath(os.path.join(args.template_dir, args.template_file))
+ template_file = os.path.basename(template_file_full_path)
+ template_dir = os.path.dirname(template_file_full_path)
+
+ with open(args.config_defs) as f:
+ config_options = yaml.safe_load(f)
+
+ config_options = fix_description(config_options)
+
+ env = Environment(loader=FileSystemLoader(template_dir), trim_blocks=True,)
+ template = env.get_template(template_file)
+ output_name = os.path.join(output_dir, template_file.replace('.j2', ''))
+ temp_vars = {'config_options': config_options}
+
+ data = to_bytes(template.render(temp_vars))
+ update_file_if_different(output_name, data)
+
+ return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/dump_keywords.py b/hacking/build_library/build_ansible/command_plugins/dump_keywords.py
new file mode 100644
index 0000000..e937931
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/dump_keywords.py
@@ -0,0 +1,121 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import importlib
+import os.path
+import pathlib
+import re
+from ansible.module_utils.compat.version import LooseVersion
+
+import jinja2
+import yaml
+from jinja2 import Environment, FileSystemLoader
+
+from ansible.module_utils._text import to_bytes
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+
+
+DEFAULT_TEMPLATE_DIR = str(pathlib.Path(__file__).resolve().parents[4] / 'docs/templates')
+TEMPLATE_FILE = 'playbooks_keywords.rst.j2'
+PLAYBOOK_CLASS_NAMES = ['Play', 'Role', 'Block', 'Task']
+
+
+def load_definitions(keyword_definitions_file):
+ docs = {}
+ with open(keyword_definitions_file) as f:
+ docs = yaml.safe_load(f)
+
+ return docs
+
+
+def extract_keywords(keyword_definitions):
+ pb_keywords = {}
+ for pb_class_name in PLAYBOOK_CLASS_NAMES:
+ if pb_class_name == 'Play':
+ module_name = 'ansible.playbook'
+ else:
+ module_name = 'ansible.playbook.{0}'.format(pb_class_name.lower())
+ module = importlib.import_module(module_name)
+ playbook_class = getattr(module, pb_class_name, None)
+ if playbook_class is None:
+ raise ImportError("We weren't able to import the module {0}".format(module_name))
+
+ # Maintain order of the actual class names for our output
+ # Build up a mapping of playbook classes to the attributes that they hold
+ pb_keywords[pb_class_name] = {k: v for (k, v) in playbook_class.fattributes.items()
+ # Filter private attributes as they're not usable in playbooks
+ if not v.private}
+
+ # pick up definitions if they exist
+ for keyword in tuple(pb_keywords[pb_class_name]):
+ if keyword in keyword_definitions:
+ pb_keywords[pb_class_name][keyword] = keyword_definitions[keyword]
+ else:
+ # check if there is an alias, otherwise undocumented
+ alias = getattr(playbook_class.fattributes.get(keyword), 'alias', None)
+ if alias and alias in keyword_definitions:
+ pb_keywords[pb_class_name][alias] = keyword_definitions[alias]
+ del pb_keywords[pb_class_name][keyword]
+ else:
+ pb_keywords[pb_class_name][keyword] = ' UNDOCUMENTED!! '
+
+ # loop is really with_ for users
+ if pb_class_name == 'Task':
+ pb_keywords[pb_class_name]['with_<lookup_plugin>'] = (
+ 'The same as ``loop`` but magically adds the output of any lookup plugin to'
+ ' generate the item list.')
+
+ # local_action is implicit with action
+ if 'action' in pb_keywords[pb_class_name]:
+ pb_keywords[pb_class_name]['local_action'] = ('Same as action but also implies'
+ ' ``delegate_to: localhost``')
+
+ return pb_keywords
+
+
+def generate_page(pb_keywords, template_dir):
+ env = Environment(loader=FileSystemLoader(template_dir), trim_blocks=True,)
+ template = env.get_template(TEMPLATE_FILE)
+ tempvars = {'pb_keywords': pb_keywords, 'playbook_class_names': PLAYBOOK_CLASS_NAMES}
+
+ keyword_page = template.render(tempvars)
+ if LooseVersion(jinja2.__version__) < LooseVersion('2.10'):
+ # jinja2 < 2.10's indent filter indents blank lines. Cleanup
+ keyword_page = re.sub(' +\n', '\n', keyword_page)
+
+ return keyword_page
+
+
+class DocumentKeywords(Command):
+ name = 'document-keywords'
+
+ @classmethod
+ def init_parser(cls, add_parser):
+ parser = add_parser(cls.name, description='Generate playbook keyword documentation from'
+ ' code and descriptions')
+ parser.add_argument("-T", "--template-dir", action="store", dest="template_dir",
+ default=DEFAULT_TEMPLATE_DIR,
+ help="directory containing Jinja2 templates")
+ parser.add_argument("-o", "--output-dir", action="store", dest="output_dir",
+ default='/tmp/', help="Output directory for rst files")
+ parser.add_argument("keyword_defs", metavar="KEYWORD-DEFINITIONS.yml", type=str,
+ help="Source for playbook keyword docs")
+
+ @staticmethod
+ def main(args):
+ keyword_definitions = load_definitions(args.keyword_defs)
+ pb_keywords = extract_keywords(keyword_definitions)
+
+ keyword_page = generate_page(pb_keywords, args.template_dir)
+ outputname = os.path.join(args.output_dir, TEMPLATE_FILE.replace('.j2', ''))
+ update_file_if_different(outputname, to_bytes(keyword_page))
+
+ return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/file_deprecated_issues.py b/hacking/build_library/build_ansible/command_plugins/file_deprecated_issues.py
new file mode 100644
index 0000000..139ecc4
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/file_deprecated_issues.py
@@ -0,0 +1,153 @@
+# -*- coding: utf-8 -*-
+# (c) 2017, Matt Martz <matt@sivel.net>
+# (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+import argparse
+import os
+import time
+
+from collections import defaultdict
+
+from ansible.release import __version__ as ansible_version
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+from .. import errors # pylint: disable=relative-beyond-top-level
+
+ANSIBLE_MAJOR_VERSION = '.'.join(ansible_version.split('.')[:2])
+
+
+def get_token(token_file):
+ if token_file:
+ return token_file.read().strip()
+
+ token = os.getenv('GITHUB_TOKEN').strip()
+ if not token:
+ raise errors.MissingUserInput(
+ 'Please provide a file containing a github oauth token with public_repo scope'
+ ' via the --github-token argument or set the GITHUB_TOKEN env var with your'
+ ' github oauth token'
+ )
+ return token
+
+
+def parse_deprecations(problems_file_handle):
+ deprecated = defaultdict(list)
+ deprecation_errors = problems_file_handle.read()
+ for line in deprecation_errors.splitlines():
+ path = line.split(':')[0]
+ if path.endswith('__init__.py'):
+ component = os.path.basename(os.path.dirname(path))
+ else:
+ component, dummy = os.path.splitext(os.path.basename(path).lstrip('_'))
+
+ title = (
+ '%s contains deprecated call to be removed in %s' %
+ (component, ANSIBLE_MAJOR_VERSION)
+ )
+ deprecated[component].append(
+ dict(title=title, path=path, line=line)
+ )
+ return deprecated
+
+
+def find_project_todo_column(repo, project_name):
+ project = None
+ for project in repo.projects():
+ if project.name.lower() == project_name:
+ break
+ else:
+ raise errors.InvalidUserInput('%s was an invalid project name' % project_name)
+
+ for project_column in project.columns():
+ column_name = project_column.name.lower()
+ if 'todo' in column_name or 'backlog' in column_name or 'to do' in column_name:
+ return project_column
+
+ raise Exception('Unable to determine the todo column in'
+ ' project %s' % project_name)
+
+
+def create_issues(deprecated, body_tmpl, repo):
+ issues = []
+
+ for component, items in deprecated.items():
+ title = items[0]['title']
+ path = '\n'.join(set((i['path']) for i in items))
+ line = '\n'.join(i['line'] for i in items)
+ body = body_tmpl % dict(component=component, path=path,
+ line=line,
+ version=ANSIBLE_MAJOR_VERSION)
+
+ issue = repo.create_issue(title, body=body, labels=['deprecated'])
+ print(issue)
+ issues.append(issue)
+
+ # Sleep a little, so that the API doesn't block us
+ time.sleep(0.5)
+
+ return issues
+
+
+class FileDeprecationTickets(Command):
+ name = 'file-deprecation-tickets'
+
+ @classmethod
+ def init_parser(cls, add_parser):
+ parser = add_parser(cls.name, description='File tickets to cleanup deprecated features for'
+ ' the next release')
+ parser.add_argument('--template', default='deprecated_issue_template.md',
+ type=argparse.FileType('r'),
+ help='Path to markdown file template to be used for issue '
+ 'body. Default: %(default)s')
+ parser.add_argument('--project-name', default='', type=str,
+ help='Name of a github project to assign all issues to')
+ parser.add_argument('--github-token', type=argparse.FileType('r'),
+ help='Path to file containing a github token with public_repo scope.'
+                                ' This token in this file will be used to open the deprecation'
+ ' tickets and add them to the github project. If not given,'
+ ' the GITHUB_TOKEN environment variable will be tried')
+ parser.add_argument('problems', type=argparse.FileType('r'),
+ help='Path to file containing pylint output for the '
+ 'ansible-deprecated-version check')
+
+ @staticmethod
+ def main(args):
+ try:
+ from github3 import GitHub
+ except ImportError:
+ raise errors.DependencyError(
+ 'This command needs the github3.py library installed to work'
+ )
+
+ token = get_token(args.github_token)
+ args.github_token.close()
+
+ deprecated = parse_deprecations(args.problems)
+ args.problems.close()
+
+ body_tmpl = args.template.read()
+ args.template.close()
+
+ project_name = args.project_name.strip().lower()
+
+ gh_conn = GitHub(token=token)
+ repo = gh_conn.repository('abadger', 'ansible')
+
+ if project_name:
+ project_column = find_project_todo_column(repo, project_name)
+
+ issues = create_issues(deprecated, body_tmpl, repo)
+
+ if project_column:
+ for issue in issues:
+ project_column.create_card_with_issue(issue)
+ time.sleep(0.5)
+
+ return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/generate_man.py b/hacking/build_library/build_ansible/command_plugins/generate_man.py
new file mode 100644
index 0000000..3795c0d
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/generate_man.py
@@ -0,0 +1,303 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+import argparse
+import os.path
+import pathlib
+import sys
+
+from jinja2 import Environment, FileSystemLoader
+
+from ansible.module_utils._text import to_bytes
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+
+
+DEFAULT_TEMPLATE_FILE = pathlib.Path(__file__).parents[4] / 'docs/templates/man.j2'
+
+
+# from https://www.python.org/dev/peps/pep-0257/
+def trim_docstring(docstring):
+ if not docstring:
+ return ''
+ # Convert tabs to spaces (following the normal Python rules)
+ # and split into a list of lines:
+ lines = docstring.expandtabs().splitlines()
+ # Determine minimum indentation (first line doesn't count):
+ indent = sys.maxsize
+ for line in lines[1:]:
+ stripped = line.lstrip()
+ if stripped:
+ indent = min(indent, len(line) - len(stripped))
+ # Remove indentation (first line is special):
+ trimmed = [lines[0].strip()]
+ if indent < sys.maxsize:
+ for line in lines[1:]:
+ trimmed.append(line[indent:].rstrip())
+ # Strip off trailing and leading blank lines:
+ while trimmed and not trimmed[-1]:
+ trimmed.pop()
+ while trimmed and not trimmed[0]:
+ trimmed.pop(0)
+ # Return a single string:
+ return '\n'.join(trimmed)
+
+
+def get_options(optlist):
+ ''' get actual options '''
+
+ opts = []
+ for opt in optlist:
+ res = {
+ 'desc': opt.help,
+ 'options': opt.option_strings
+ }
+ if isinstance(opt, argparse._StoreAction):
+ res['arg'] = opt.dest.upper()
+ elif not res['options']:
+ continue
+ opts.append(res)
+
+ return opts
+
+
+def dedupe_groups(parser):
+ action_groups = []
+ for action_group in parser._action_groups:
+ found = False
+ for a in action_groups:
+ if a._actions == action_group._actions:
+ found = True
+ break
+ if not found:
+ action_groups.append(action_group)
+ return action_groups
+
+
+def get_option_groups(option_parser):
+ groups = []
+ for action_group in dedupe_groups(option_parser)[1:]:
+ group_info = {}
+ group_info['desc'] = action_group.description
+ group_info['options'] = action_group._actions
+ group_info['group_obj'] = action_group
+ groups.append(group_info)
+ return groups
+
+
+def opt_doc_list(parser):
+ ''' iterate over options lists '''
+
+ results = []
+ for option_group in dedupe_groups(parser)[1:]:
+ results.extend(get_options(option_group._actions))
+
+ results.extend(get_options(parser._actions))
+
+ return results
+
+
+# def opts_docs(cli, name):
+def opts_docs(cli_class_name, cli_module_name):
+ ''' generate doc structure from options '''
+
+ cli_name = 'ansible-%s' % cli_module_name
+ if cli_module_name == 'adhoc':
+ cli_name = 'ansible'
+
+    # With no action/subcommand
+ # shared opts set
+ # instantiate each cli and ask its options
+ cli_klass = getattr(__import__("ansible.cli.%s" % cli_module_name,
+ fromlist=[cli_class_name]), cli_class_name)
+ cli = cli_klass([cli_name])
+
+ # parse the common options
+ try:
+ cli.init_parser()
+ except Exception:
+ pass
+
+ # base/common cli info
+ docs = {
+ 'cli': cli_module_name,
+ 'cli_name': cli_name,
+ 'usage': cli.parser.format_usage(),
+ 'short_desc': cli.parser.description,
+ 'long_desc': trim_docstring(cli.__doc__),
+ 'actions': {},
+ 'content_depth': 2,
+ }
+ option_info = {'option_names': [],
+ 'options': [],
+ 'groups': []}
+
+ for extras in ('ARGUMENTS'):
+ if hasattr(cli, extras):
+ docs[extras.lower()] = getattr(cli, extras)
+
+ common_opts = opt_doc_list(cli.parser)
+ groups_info = get_option_groups(cli.parser)
+ shared_opt_names = []
+ for opt in common_opts:
+ shared_opt_names.extend(opt.get('options', []))
+
+ option_info['options'] = common_opts
+ option_info['option_names'] = shared_opt_names
+
+ option_info['groups'].extend(groups_info)
+
+ docs.update(option_info)
+
+ # now for each action/subcommand
+ # force populate parser with per action options
+
+ def get_actions(parser, docs):
+        # use class attrs not the attrs on an instance (not that it matters here...)
+ try:
+ subparser = parser._subparsers._group_actions[0].choices
+ except AttributeError:
+ subparser = {}
+
+ depth = 0
+
+ for action, parser in subparser.items():
+ action_info = {'option_names': [],
+ 'options': [],
+ 'actions': {}}
+ # docs['actions'][action] = {}
+ # docs['actions'][action]['name'] = action
+ action_info['name'] = action
+ action_info['desc'] = trim_docstring(getattr(cli, 'execute_%s' % action).__doc__)
+
+ # docs['actions'][action]['desc'] = getattr(cli, 'execute_%s' % action).__doc__.strip()
+ action_doc_list = opt_doc_list(parser)
+
+ uncommon_options = []
+ for action_doc in action_doc_list:
+ # uncommon_options = []
+
+ option_aliases = action_doc.get('options', [])
+ for option_alias in option_aliases:
+
+ if option_alias in shared_opt_names:
+ continue
+
+ # TODO: use set
+ if option_alias not in action_info['option_names']:
+ action_info['option_names'].append(option_alias)
+
+ if action_doc in action_info['options']:
+ continue
+
+ uncommon_options.append(action_doc)
+
+ action_info['options'] = uncommon_options
+
+ depth = 1 + get_actions(parser, action_info)
+
+ docs['actions'][action] = action_info
+
+ return depth
+
+ action_depth = get_actions(cli.parser, docs)
+ docs['content_depth'] = action_depth + 1
+
+ docs['options'] = opt_doc_list(cli.parser)
+ return docs
+
+
+class GenerateMan(Command):
+ name = 'generate-man'
+
+ @classmethod
+ def init_parser(cls, add_parser):
+ parser = add_parser(name=cls.name,
+ description='Generate cli documentation from cli docstrings')
+
+ parser.add_argument("-t", "--template-file", action="store", dest="template_file",
+ default=DEFAULT_TEMPLATE_FILE, help="path to jinja2 template")
+ parser.add_argument("-o", "--output-dir", action="store", dest="output_dir",
+ default='/tmp/', help="Output directory for rst files")
+ parser.add_argument("-f", "--output-format", action="store", dest="output_format",
+ default='man',
+ help="Output format for docs (the default 'man' or 'rst')")
+ parser.add_argument('cli_modules', help='CLI module name(s)', metavar='MODULE_NAME', nargs='*')
+
+ @staticmethod
+ def main(args):
+ template_file = args.template_file
+ template_path = os.path.expanduser(template_file)
+ template_dir = os.path.abspath(os.path.dirname(template_path))
+ template_basename = os.path.basename(template_file)
+
+ output_dir = os.path.abspath(args.output_dir)
+ output_format = args.output_format
+
+ cli_modules = args.cli_modules
+
+        # various cli parsing things check sys.argv if the 'args' that are passed in are []
+        # so just remove any args so the cli modules don't try to parse them resulting in warnings
+ sys.argv = [sys.argv[0]]
+
+ allvars = {}
+ output = {}
+ cli_list = []
+ cli_bin_name_list = []
+
+ # for binary in os.listdir('../../lib/ansible/cli'):
+ for cli_module_name in cli_modules:
+ binary = os.path.basename(os.path.expanduser(cli_module_name))
+
+ if not binary.endswith('.py'):
+ continue
+ elif binary == '__init__.py':
+ continue
+
+ cli_name = os.path.splitext(binary)[0]
+
+ if cli_name == 'adhoc':
+ cli_class_name = 'AdHocCLI'
+ # myclass = 'AdHocCLI'
+ output[cli_name] = 'ansible.1.rst.in'
+ cli_bin_name = 'ansible'
+ else:
+ # myclass = "%sCLI" % libname.capitalize()
+ cli_class_name = "%sCLI" % cli_name.capitalize()
+ output[cli_name] = 'ansible-%s.1.rst.in' % cli_name
+ cli_bin_name = 'ansible-%s' % cli_name
+
+ # FIXME:
+ allvars[cli_name] = opts_docs(cli_class_name, cli_name)
+ cli_bin_name_list.append(cli_bin_name)
+
+ cli_list = allvars.keys()
+
+ doc_name_formats = {'man': '%s.1.rst.in',
+ 'rst': '%s.rst'}
+
+ for cli_name in cli_list:
+
+ # template it!
+ env = Environment(loader=FileSystemLoader(template_dir))
+ template = env.get_template(template_basename)
+
+ # add rest to vars
+ tvars = allvars[cli_name]
+ tvars['cli_list'] = cli_list
+ tvars['cli_bin_name_list'] = cli_bin_name_list
+ tvars['cli'] = cli_name
+ if '-i' in tvars['options']:
+ print('uses inventory')
+
+ manpage = template.render(tvars)
+ filename = os.path.join(output_dir, doc_name_formats[output_format] % tvars['cli_name'])
+ update_file_if_different(filename, to_bytes(manpage))
diff --git a/hacking/build_library/build_ansible/command_plugins/porting_guide.py b/hacking/build_library/build_ansible/command_plugins/porting_guide.py
new file mode 100644
index 0000000..431485b
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/porting_guide.py
@@ -0,0 +1,138 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+from jinja2 import Environment, DictLoader
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+
+
# Skeleton reStructuredText for a brand-new ansible-core porting guide.
# Rendered with jinja2; the ``ver`` and ``prev_ver`` variables are supplied
# by generate_porting_guide().  Every section starts out as "No notable
# changes" and is filled in by hand during the release cycle.
PORTING_GUIDE_TEMPLATE = """
.. _porting_{{ ver }}_guide_core:

*******************************
Ansible-core {{ ver }} Porting Guide
*******************************

This section discusses the behavioral changes between ``ansible-core`` {{ prev_ver }} and ``ansible-core`` {{ ver }}.

It is intended to assist in updating your playbooks, plugins and other parts of your Ansible infrastructure so they will work with this version of Ansible.

We suggest you read this page along with `ansible-core Changelog for {{ ver }} <https://github.com/ansible/ansible/blob/stable-{{ ver }}/changelogs/CHANGELOG-v{{ ver }}.rst>`_ to understand what updates you may need to make.

This document is part of a collection on porting. The complete list of porting guides can be found at :ref:`porting guides <porting_guides>`.

.. contents:: Topics


Playbook
========

No notable changes


Command Line
============

No notable changes


Deprecated
==========

No notable changes


Modules
=======

No notable changes


Modules removed
---------------

The following modules no longer exist:

* No notable changes


Deprecation notices
-------------------

No notable changes


Noteworthy module changes
-------------------------

No notable changes


Plugins
=======

No notable changes


Porting custom scripts
======================

No notable changes


Networking
==========

No notable changes

"""  # noqa for E501 (line length).
# jinja2 is horrid about getting rid of extra newlines so we have to have a single line per
# paragraph for proper wrapping to occur
+
# Shared jinja2 environment holding the single porting-guide template under
# the key used by generate_porting_guide().  trim_blocks/lstrip_blocks keep
# block tags from injecting stray whitespace into the rendered rst.
# NOTE(review): the template above contains no {% trans %} tags, so the i18n
# extension looks unused here — confirm before removing it.
JINJA_ENV = Environment(
    loader=DictLoader({'porting_guide': PORTING_GUIDE_TEMPLATE,
                       }),
    extensions=['jinja2.ext.i18n'],
    trim_blocks=True,
    lstrip_blocks=True,
)
+
+
def generate_porting_guide(version):
    """Render the porting guide skeleton for *version* and return it as text.

    The previous release is derived by decrementing the final dotted
    component of *version* (for example ``2.13`` -> ``2.12``).
    """
    template = JINJA_ENV.get_template('porting_guide')

    # Compute the preceding version string from the requested one.
    parts = version.split('.')
    parts[-1] = str(int(parts[-1]) - 1)

    return template.render(ver=version, prev_ver='.'.join(parts))
+
+
def write_guide(version, guide_content):
    """Write *guide_content* to the docsite porting guide file for *version*.

    The destination path is relative to the repository root, so this must be
    invoked from the top of a repository checkout.
    """
    filename = 'docs/docsite/rst/porting_guides/porting_guide_core_{0}.rst'.format(version)
    # Write UTF-8 explicitly so the guide's encoding does not depend on the
    # current locale settings of whoever runs the release tooling.
    with open(filename, 'w', encoding='utf-8') as out_file:
        out_file.write(guide_content)
+
+
class PortingGuideCommand(Command):
    """Build-tool subcommand that writes a fresh porting guide skeleton."""

    name = 'porting-guide'

    @classmethod
    def init_parser(cls, add_parser):
        """Register this subcommand's arguments with the shared CLI parser."""
        parser = add_parser(cls.name, description="Generate a fresh porting guide template")
        parser.add_argument("--version", dest="version", type=str, required=True, action='store',
                            help="Version of Ansible to write the porting guide for")

    @staticmethod
    def main(args):
        """Render the guide for the requested version and write it to disk."""
        write_guide(args.version, generate_porting_guide(args.version))
        return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/release_announcement.py b/hacking/build_library/build_ansible/command_plugins/release_announcement.py
new file mode 100644
index 0000000..edc928a
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/release_announcement.py
@@ -0,0 +1,78 @@
+# coding: utf-8
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+import sys
+from collections import UserString
+from ansible.module_utils.compat.version import LooseVersion
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+from .. import errors # pylint: disable=relative-beyond-top-level
+
+
class VersionStr(UserString):
    """A version string that also carries a parsed form so jinja2 templates
    can sort versions numerically rather than lexicographically."""

    def __init__(self, string):
        # The string value itself is whitespace-stripped...
        stripped = string.strip()
        super().__init__(stripped)
        # ...while the comparable object is built from the raw input,
        # matching the original behavior.
        self.ver_obj = LooseVersion(string)
+
+
def transform_args(args):
    """Replace each entry of ``args.versions`` with a VersionStr.

    This makes it possible to sort versions inside the jinja2 templates.
    """
    args.versions = [VersionStr(version) for version in args.versions]
    return args
+
+
def write_message(filename, message):
    """Write *message* to *filename*, or to stdout when *filename* is ``-``.

    :arg filename: Destination path, or the literal string ``-`` for stdout.
    :arg message: Text to write.
    """
    if filename != '-':
        # Be explicit about the encoding so announcement text is written as
        # UTF-8 regardless of the active locale.
        with open(filename, 'w', encoding='utf-8') as out_file:
            out_file.write(message)
    else:
        # Blank lines separate the message from earlier terminal output.
        sys.stdout.write('\n\n')
        sys.stdout.write(message)
+
+
class ReleaseAnnouncementCommand(Command):
    """Build-tool subcommand that renders release announcement texts."""

    name = 'release-announcement'

    @classmethod
    def init_parser(cls, add_parser):
        """Register this subcommand's arguments with the shared CLI parser."""
        parser = add_parser(cls.name,
                            description="Generate email and twitter announcements from template")

        parser.add_argument("--version", dest="versions", type=str, required=True, action='append',
                            help="Versions of Ansible to announce")
        parser.add_argument("--name", type=str, required=True, help="Real name to use on emails")
        parser.add_argument("--email-out", type=str, default="-",
                            help="Filename to place the email announcement into")
        parser.add_argument("--twitter-out", type=str, default="-",
                            help="Filename to place the twitter announcement into")

    @classmethod
    def main(cls, args):
        """Render both announcements and write each to its destination."""
        if sys.version_info < (3, 6):
            raise errors.DependencyError('The {0} subcommand needs Python-3.6+'
                                         ' to run'.format(cls.name))

        # Deferred import: these functions are invalid on Python-3.5 while the
        # command plugins and init_parser() method need to stay compatible with
        # Python-3.4+ for now.
        # Pylint doesn't understand Python3 namespace modules.
        from .. announce import create_short_message, create_long_message  # pylint: disable=relative-beyond-top-level

        args = transform_args(args)

        write_message(args.twitter_out, create_short_message(args.versions))
        write_message(args.email_out, create_long_message(args.versions, args.name))
        return 0
diff --git a/hacking/build_library/build_ansible/command_plugins/update_intersphinx.py b/hacking/build_library/build_ansible/command_plugins/update_intersphinx.py
new file mode 100644
index 0000000..9337859
--- /dev/null
+++ b/hacking/build_library/build_ansible/command_plugins/update_intersphinx.py
@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+# (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+import argparse
+import importlib
+import os
+import pathlib
+import time
+import urllib.parse
+
+from collections import defaultdict
+
+from ansible.module_utils.common.collections import is_iterable
+from ansible.module_utils.urls import Request
+
+# Pylint doesn't understand Python3 namespace modules.
+from ..commands import Command # pylint: disable=relative-beyond-top-level
+from .. import errors # pylint: disable=relative-beyond-top-level
+
+
# Help text appended to every malformed-configuration warning printed by
# UpdateIntersphinxCache.main() below.
EXAMPLE_CONF = """
A proper intersphinx_mapping entry should look like:
    intersphinx_mapping = {
        'python3': ('https://docs.python.org/3', (None, 'python3.inv'))
    }

See the intersphinx docs for more info:
    https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#confval-intersphinx_mapping
"""
+
+
class UpdateIntersphinxCache(Command):
    """Subcommand that refreshes the cached intersphinx ``objects.inv`` files."""

    name = 'update-intersphinx-cache'

    @classmethod
    def init_parser(cls, add_parser):
        """Register this subcommand's arguments with the shared CLI parser."""
        parser = add_parser(cls.name, description='Update cached intersphinx mappings. This'
                            ' updates the cached intersphinx mappings for docs to reference'
                            ' documentation from other projects.')
        parser.add_argument('-o', '--output-dir', action='store',
                            help='Path to directory the cached objects.inv files are stored in')
        parser.add_argument('-c', '--conf-file', action='store',
                            help='Path to a sphinx config file to retrieve intersphinx config from')

    @staticmethod
    def _objects_inv_url(base_url):
        """Return the objects.inv URL for an intersphinx base URL.

        Sphinx treats the configured URL as a directory and joins
        ``objects.inv`` onto it posixpath-style.  ``urllib.parse.urljoin``
        must NOT be used here: for a URL without a trailing slash (such as
        ``https://docs.python.org/3``, the very form shown in EXAMPLE_CONF)
        it replaces the final path component, yielding the wrong address
        (``https://docs.python.org/objects.inv``).
        """
        return base_url.rstrip('/') + '/objects.inv'

    @staticmethod
    def _extract_inventory_info(intersphinx_name, inventory):
        """Pull (url, cache_file) out of one intersphinx mapping entry.

        Prints a warning and returns None when the entry is malformed.
        """
        url = cache_file = None
        for inv_source in inventory:
            if isinstance(inv_source, str) and url is None:
                url = inv_source
            elif is_iterable(inv_source) and cache_file is None:
                if len(inv_source) != 2:
                    print('WARNING: The fallback entry for {0} should be a tuple of (None,'
                          ' filename).\n{1}'.format(intersphinx_name, EXAMPLE_CONF))
                    continue
                cache_file = inv_source[1]
            else:
                print('WARNING: The configuration for {0} should be a tuple of one url and one'
                      ' tuple for a fallback filename.\n{1}'.format(intersphinx_name,
                                                                    EXAMPLE_CONF))
                continue

        if url is None or cache_file is None:
            print('WARNING: Could not figure out the url or fallback'
                  ' filename for {0}.\n{1}'.format(intersphinx_name, EXAMPLE_CONF))
            return None

        return url, cache_file

    @staticmethod
    def main(args):
        """Download every configured objects.inv into its cache file.

        Returns 0 on completion; malformed mapping entries are warned about
        and skipped rather than treated as fatal.
        """
        # The file header only does 'import importlib'; import the submodule
        # explicitly so attribute access below cannot fail with AttributeError.
        import importlib.util

        # Retrieve the intersphinx information from the sphinx config file
        conf_dir = pathlib.Path(args.conf_file).parent

        conf_module_spec = importlib.util.spec_from_file_location('sphinxconf', args.conf_file)
        conf_module = importlib.util.module_from_spec(conf_module_spec)
        conf_module_spec.loader.exec_module(conf_module)
        intersphinx_mapping = conf_module.intersphinx_mapping

        for intersphinx_name, inventory in intersphinx_mapping.items():
            if not is_iterable(inventory) or len(inventory) != 2:
                print('WARNING: The intersphinx entry for {0} must be'
                      ' a two-tuple.\n{1}'.format(intersphinx_name, EXAMPLE_CONF))
                continue

            info = UpdateIntersphinxCache._extract_inventory_info(intersphinx_name, inventory)
            if info is None:
                continue
            url, cache_file = info

            url = UpdateIntersphinxCache._objects_inv_url(url)
            # Resolve any relative cache files to be relative to the conf file
            cache_file = conf_dir / cache_file

            # Retrieve the inventory and cache it
            # The jinja CDN seems to be blocking the default urllib User-Agent
            requestor = Request(headers={'User-Agent': 'Definitely Not Python ;-)'})
            with requestor.open('GET', url) as source_file:
                with open(cache_file, 'wb') as f:
                    f.write(source_file.read())

        print('Download of new cache files complete. Remember to git commit -a the changes')

        return 0