From a453ac31f3428614cceb99027f8efbdb9258a40b Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Tue, 14 May 2024 22:03:01 +0200 Subject: Adding upstream version 2.10.7+merged+base+2.10.8+dfsg. Signed-off-by: Daniel Baumann --- docs/docsite/rst/dev_guide/debugging.rst | 112 +++ docs/docsite/rst/dev_guide/developing_api.rst | 47 ++ .../rst/dev_guide/developing_collections.rst | 812 +++++++++++++++++++ docs/docsite/rst/dev_guide/developing_core.rst | 21 + .../docsite/rst/dev_guide/developing_inventory.rst | 422 ++++++++++ docs/docsite/rst/dev_guide/developing_locally.rst | 105 +++ .../rst/dev_guide/developing_module_utilities.rst | 69 ++ docs/docsite/rst/dev_guide/developing_modules.rst | 51 ++ .../developing_modules_best_practices.rst | 177 +++++ .../rst/dev_guide/developing_modules_checklist.rst | 46 ++ .../dev_guide/developing_modules_documenting.rst | 442 +++++++++++ .../rst/dev_guide/developing_modules_general.rst | 221 ++++++ .../dev_guide/developing_modules_general_aci.rst | 443 +++++++++++ .../developing_modules_general_windows.rst | 696 ++++++++++++++++ .../rst/dev_guide/developing_modules_in_groups.rst | 80 ++ docs/docsite/rst/dev_guide/developing_plugins.rst | 495 ++++++++++++ .../dev_guide/developing_program_flow_modules.rst | 880 +++++++++++++++++++++ docs/docsite/rst/dev_guide/developing_python_3.rst | 404 ++++++++++ docs/docsite/rst/dev_guide/developing_rebasing.rst | 83 ++ docs/docsite/rst/dev_guide/index.rst | 92 +++ docs/docsite/rst/dev_guide/migrating_roles.rst | 410 ++++++++++ docs/docsite/rst/dev_guide/module_lifecycle.rst | 50 ++ .../rst/dev_guide/overview_architecture.rst | 149 ++++ .../rst/dev_guide/platforms/aws_guidelines.rst | 754 ++++++++++++++++++ .../dev_guide/platforms/openstack_guidelines.rst | 57 ++ .../rst/dev_guide/platforms/ovirt_dev_guide.rst | 220 ++++++ .../rst/dev_guide/platforms/vmware_guidelines.rst | 270 +++++++ .../rst/dev_guide/shared_snippets/licensing.txt | 9 + .../rst/dev_guide/style_guide/basic_rules.rst | 69 ++ 
.../dev_guide/style_guide/grammar_punctuation.rst | 201 +++++ .../style_guide/images/commas-matter-2.jpg | Bin 0 -> 53403 bytes .../dev_guide/style_guide/images/commas-matter.jpg | Bin 0 -> 85373 bytes .../dev_guide/style_guide/images/hyphen-funny.jpg | Bin 0 -> 49628 bytes .../dev_guide/style_guide/images/thenvsthan.jpg | Bin 0 -> 36500 bytes docs/docsite/rst/dev_guide/style_guide/index.rst | 244 ++++++ .../rst/dev_guide/style_guide/resources.rst | 10 + .../rst/dev_guide/style_guide/search_hints.rst | 48 ++ .../dev_guide/style_guide/spelling_word_choice.rst | 327 ++++++++ .../rst/dev_guide/style_guide/trademarks.rst | 96 +++ .../rst/dev_guide/style_guide/voice_style.rst | 20 + docs/docsite/rst/dev_guide/style_guide/why_use.rst | 23 + docs/docsite/rst/dev_guide/testing.rst | 243 ++++++ .../testing/sanity/action-plugin-docs.rst | 4 + .../rst/dev_guide/testing/sanity/ansible-doc.rst | 4 + .../sanity/ansible-var-precedence-check.rst | 6 + .../testing/sanity/azure-requirements.rst | 10 + .../rst/dev_guide/testing/sanity/bin-symlinks.rst | 11 + .../rst/dev_guide/testing/sanity/boilerplate.rst | 11 + .../rst/dev_guide/testing/sanity/botmeta.rst | 4 + .../rst/dev_guide/testing/sanity/changelog.rst | 17 + .../rst/dev_guide/testing/sanity/compile.rst | 4 + .../testing/sanity/configure-remoting-ps1.rst | 5 + .../dev_guide/testing/sanity/deprecated-config.rst | 6 + .../rst/dev_guide/testing/sanity/docs-build.rst | 4 + .../rst/dev_guide/testing/sanity/empty-init.rst | 10 + .../testing/sanity/future-import-boilerplate.rst | 51 ++ .../rst/dev_guide/testing/sanity/ignores.rst | 99 +++ .../rst/dev_guide/testing/sanity/import.rst | 5 + .../testing/sanity/integration-aliases.rst | 182 +++++ .../rst/dev_guide/testing/sanity/line-endings.rst | 4 + .../testing/sanity/metaclass-boilerplate.rst | 23 + .../rst/dev_guide/testing/sanity/no-assert.rst | 16 + .../rst/dev_guide/testing/sanity/no-basestring.rst | 11 + .../dev_guide/testing/sanity/no-dict-iteritems.rst | 16 + 
.../dev_guide/testing/sanity/no-dict-iterkeys.rst | 9 + .../testing/sanity/no-dict-itervalues.rst | 16 + .../dev_guide/testing/sanity/no-get-exception.rst | 28 + .../testing/sanity/no-illegal-filenames.rst | 61 ++ .../dev_guide/testing/sanity/no-main-display.rst | 12 + .../dev_guide/testing/sanity/no-smart-quotes.rst | 4 + .../testing/sanity/no-tests-as-filters.rst | 12 + .../testing/sanity/no-underscore-variable.rst | 30 + .../testing/sanity/no-unicode-literals.rst | 16 + .../dev_guide/testing/sanity/no-unwanted-files.rst | 13 + .../testing/sanity/no-wildcard-import.rst | 31 + .../dev_guide/testing/sanity/obsolete-files.rst | 14 + .../rst/dev_guide/testing/sanity/package-data.rst | 5 + docs/docsite/rst/dev_guide/testing/sanity/pep8.rst | 6 + .../rst/dev_guide/testing/sanity/pslint.rst | 4 + .../testing/sanity/pylint-ansible-test.rst | 8 + .../rst/dev_guide/testing/sanity/pylint.rst | 4 + .../rst/dev_guide/testing/sanity/release-names.rst | 4 + .../dev_guide/testing/sanity/replace-urlopen.rst | 4 + .../sanity/required-and-default-attributes.rst | 5 + .../rst/dev_guide/testing/sanity/rstcheck.rst | 4 + .../dev_guide/testing/sanity/runtime-metadata.rst | 7 + .../rst/dev_guide/testing/sanity/sanity-docs.rst | 4 + .../rst/dev_guide/testing/sanity/shebang.rst | 16 + .../rst/dev_guide/testing/sanity/shellcheck.rst | 4 + .../rst/dev_guide/testing/sanity/symlinks.rst | 6 + .../dev_guide/testing/sanity/test-constraints.rst | 4 + .../dev_guide/testing/sanity/update-bundled.rst | 31 + .../testing/sanity/use-argspec-type-path.rst | 10 + .../dev_guide/testing/sanity/use-compat-six.rst | 4 + .../dev_guide/testing/sanity/validate-modules.rst | 6 + .../rst/dev_guide/testing/sanity/yamllint.rst | 4 + docs/docsite/rst/dev_guide/testing_compile.rst | 76 ++ .../rst/dev_guide/testing_documentation.rst | 36 + docs/docsite/rst/dev_guide/testing_httptester.rst | 27 + docs/docsite/rst/dev_guide/testing_integration.rst | 236 ++++++ .../rst/dev_guide/testing_integration_legacy.rst | 108 +++ 
docs/docsite/rst/dev_guide/testing_pep8.rst | 24 + .../rst/dev_guide/testing_running_locally.rst | 89 +++ docs/docsite/rst/dev_guide/testing_sanity.rst | 53 ++ docs/docsite/rst/dev_guide/testing_units.rst | 213 +++++ .../rst/dev_guide/testing_units_modules.rst | 563 +++++++++++++ .../rst/dev_guide/testing_validate-modules.rst | 165 ++++ 107 files changed, 11377 insertions(+) create mode 100644 docs/docsite/rst/dev_guide/debugging.rst create mode 100644 docs/docsite/rst/dev_guide/developing_api.rst create mode 100644 docs/docsite/rst/dev_guide/developing_collections.rst create mode 100644 docs/docsite/rst/dev_guide/developing_core.rst create mode 100644 docs/docsite/rst/dev_guide/developing_inventory.rst create mode 100644 docs/docsite/rst/dev_guide/developing_locally.rst create mode 100644 docs/docsite/rst/dev_guide/developing_module_utilities.rst create mode 100644 docs/docsite/rst/dev_guide/developing_modules.rst create mode 100644 docs/docsite/rst/dev_guide/developing_modules_best_practices.rst create mode 100644 docs/docsite/rst/dev_guide/developing_modules_checklist.rst create mode 100644 docs/docsite/rst/dev_guide/developing_modules_documenting.rst create mode 100644 docs/docsite/rst/dev_guide/developing_modules_general.rst create mode 100644 docs/docsite/rst/dev_guide/developing_modules_general_aci.rst create mode 100644 docs/docsite/rst/dev_guide/developing_modules_general_windows.rst create mode 100644 docs/docsite/rst/dev_guide/developing_modules_in_groups.rst create mode 100644 docs/docsite/rst/dev_guide/developing_plugins.rst create mode 100644 docs/docsite/rst/dev_guide/developing_program_flow_modules.rst create mode 100644 docs/docsite/rst/dev_guide/developing_python_3.rst create mode 100644 docs/docsite/rst/dev_guide/developing_rebasing.rst create mode 100644 docs/docsite/rst/dev_guide/index.rst create mode 100644 docs/docsite/rst/dev_guide/migrating_roles.rst create mode 100644 docs/docsite/rst/dev_guide/module_lifecycle.rst create mode 100644 
docs/docsite/rst/dev_guide/overview_architecture.rst create mode 100644 docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst create mode 100644 docs/docsite/rst/dev_guide/platforms/openstack_guidelines.rst create mode 100644 docs/docsite/rst/dev_guide/platforms/ovirt_dev_guide.rst create mode 100644 docs/docsite/rst/dev_guide/platforms/vmware_guidelines.rst create mode 100644 docs/docsite/rst/dev_guide/shared_snippets/licensing.txt create mode 100644 docs/docsite/rst/dev_guide/style_guide/basic_rules.rst create mode 100644 docs/docsite/rst/dev_guide/style_guide/grammar_punctuation.rst create mode 100644 docs/docsite/rst/dev_guide/style_guide/images/commas-matter-2.jpg create mode 100644 docs/docsite/rst/dev_guide/style_guide/images/commas-matter.jpg create mode 100644 docs/docsite/rst/dev_guide/style_guide/images/hyphen-funny.jpg create mode 100644 docs/docsite/rst/dev_guide/style_guide/images/thenvsthan.jpg create mode 100644 docs/docsite/rst/dev_guide/style_guide/index.rst create mode 100644 docs/docsite/rst/dev_guide/style_guide/resources.rst create mode 100644 docs/docsite/rst/dev_guide/style_guide/search_hints.rst create mode 100644 docs/docsite/rst/dev_guide/style_guide/spelling_word_choice.rst create mode 100644 docs/docsite/rst/dev_guide/style_guide/trademarks.rst create mode 100644 docs/docsite/rst/dev_guide/style_guide/voice_style.rst create mode 100644 docs/docsite/rst/dev_guide/style_guide/why_use.rst create mode 100644 docs/docsite/rst/dev_guide/testing.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/action-plugin-docs.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/ansible-doc.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/ansible-var-precedence-check.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/azure-requirements.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/bin-symlinks.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/boilerplate.rst create 
mode 100644 docs/docsite/rst/dev_guide/testing/sanity/botmeta.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/changelog.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/compile.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/configure-remoting-ps1.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/deprecated-config.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/docs-build.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/empty-init.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/future-import-boilerplate.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/ignores.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/import.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/integration-aliases.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/line-endings.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/metaclass-boilerplate.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/no-assert.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/no-basestring.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/no-dict-iteritems.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/no-dict-iterkeys.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/no-dict-itervalues.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/no-get-exception.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/no-illegal-filenames.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/no-main-display.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/no-tests-as-filters.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/no-underscore-variable.rst create mode 100644 
docs/docsite/rst/dev_guide/testing/sanity/no-unicode-literals.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/no-unwanted-files.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/no-wildcard-import.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/obsolete-files.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/package-data.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/pep8.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/pslint.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/pylint-ansible-test.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/pylint.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/release-names.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/replace-urlopen.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/required-and-default-attributes.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/rstcheck.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/runtime-metadata.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/sanity-docs.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/shebang.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/shellcheck.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/symlinks.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/test-constraints.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/update-bundled.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/use-argspec-type-path.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/use-compat-six.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/validate-modules.rst create mode 100644 docs/docsite/rst/dev_guide/testing/sanity/yamllint.rst create mode 100644 docs/docsite/rst/dev_guide/testing_compile.rst create mode 100644 
docs/docsite/rst/dev_guide/testing_documentation.rst create mode 100644 docs/docsite/rst/dev_guide/testing_httptester.rst create mode 100644 docs/docsite/rst/dev_guide/testing_integration.rst create mode 100644 docs/docsite/rst/dev_guide/testing_integration_legacy.rst create mode 100644 docs/docsite/rst/dev_guide/testing_pep8.rst create mode 100644 docs/docsite/rst/dev_guide/testing_running_locally.rst create mode 100644 docs/docsite/rst/dev_guide/testing_sanity.rst create mode 100644 docs/docsite/rst/dev_guide/testing_units.rst create mode 100644 docs/docsite/rst/dev_guide/testing_units_modules.rst create mode 100644 docs/docsite/rst/dev_guide/testing_validate-modules.rst (limited to 'docs/docsite/rst/dev_guide') diff --git a/docs/docsite/rst/dev_guide/debugging.rst b/docs/docsite/rst/dev_guide/debugging.rst new file mode 100644 index 00000000..6885b252 --- /dev/null +++ b/docs/docsite/rst/dev_guide/debugging.rst @@ -0,0 +1,112 @@ +.. _debugging_modules: + +***************** +Debugging modules +***************** + +.. contents:: + :local: + +.. _detailed_debugging: + +Detailed debugging steps +======================== + +Ansible modules are put together as a zip file consisting of the module file and the various Python module boilerplate inside of a wrapper script. To see what is actually happening in the module, you need to extract the file from the wrapper. The wrapper script provides helper methods that let you do that. + +The following steps use ``localhost`` as the target host, but you can use the same steps to debug against remote hosts as well. For a simpler approach to debugging without using the temporary files, see :ref:`simple debugging `. + + +#. Set :envvar:`ANSIBLE_KEEP_REMOTE_FILES` to ``1`` on the control host so Ansible will keep the remote module files instead of deleting them after the module finishes executing. Use the ``-vvv`` option to make Ansible more verbose. This will display the file name of the temporary module file. + + .. 
code-block:: shell-session + + $ ANSIBLE_KEEP_REMOTE_FILES=1 ansible localhost -m ping -a 'data=debugging_session' -vvv + <127.0.0.1> ESTABLISH LOCAL CONNECTION FOR USER: badger + <127.0.0.1> EXEC /bin/sh -c '( umask 77 && mkdir -p "` echo $HOME/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595 `" && echo "` echo $HOME/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595 `" )' + <127.0.0.1> PUT /var/tmp/tmpjdbJ1w TO /home/badger/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595/AnsiballZ_ping.py + <127.0.0.1> EXEC /bin/sh -c 'LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595/AnsiballZ_ping.py && sleep 0' + localhost | SUCCESS => { + "changed": false, + "invocation": { + "module_args": { + "data": "debugging_session" + }, + "module_name": "ping" + }, + "ping": "debugging_session" + } + +#. Navigate to the temporary directory from the previous step. If the previous command was run against a remote host, connect to that host first before trying to navigate to the temporary directory. + + .. code-block:: shell-session + + $ ssh remotehost # only if not debugging against localhost + $ cd /home/badger/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595 + +#. Run the wrapper's ``explode`` command to turn the string into some Python files that you can work with. + + .. code-block:: shell-session + + $ python AnsiballZ_ping.py explode + Module expanded into: + /home/badger/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595/debug_dir + + If you want to examine the wrapper file you can. It will show a small Python script with a large base64 encoded string. The string contains the module to execute. + +#. When you look into the temporary directory you'll see a structure like this: + + .. 
code-block:: shell-session + + ├── AnsiballZ_ping.py + └── debug_dir + ├── ansible + │   ├── __init__.py + │   ├── module_utils + │   │   ├── __init__.py + │   │   ├── _text.py + │   │   ├── basic.py + │   │   ├── common + │   │   ├── compat + │   │   ├── distro + │   │   ├── parsing + │   │   ├── pycompat24.py + │   │   └── six + │   └── modules + │   ├── __init__.py + │   └── ping.py + └── args + + * ``AnsiballZ_ping.py`` is the Python script with the the module code stored in a base64 encoded string. It contains various helper functions for executing the module. + + * ``ping.py`` is the code for the module itself. You can modify this code to see what effect it would have on your module, or for debugging purposes. + + * The ``args`` file contains a JSON string. The string is a dictionary containing the module arguments and other variables that Ansible passes into the module to change its behavior. Modify this file to change the parameters passed to the module. + + * The ``ansible`` directory contains the module code in ``modules`` as well as code from :mod:`ansible.module_utils` that is used by the module. Ansible includes files for any :mod:`ansible.module_utils` imports in the module but not any files from any other module. If your module uses :mod:`ansible.module_utils.url` Ansible will include it for you. But if your module includes `requests `_, then you'll have to make sure that the Python `requests library `_ is installed on the system before running the module. + + You can modify files in this directory if you suspect that the module is having a problem in some of this boilerplate code rather than in the module code you have written. + +#. Once you edit the code or arguments in the exploded tree, use the ``execute`` subcommand to run it: + + .. 
code-block:: shell-session + + $ python AnsiballZ_ping.py execute + {"invocation": {"module_args": {"data": "debugging_session"}}, "changed": false, "ping": "debugging_session"} + + This subcommand inserts the absolute path to ``debug_dir`` as the first item in ``sys.path`` and invokes the script using the arguments in the ``args`` file. You can continue to run the module like this until you understand the problem. Then you can copy the changes back into your real module file and test that the real module works via ``ansible`` or ``ansible-playbook``. + + +.. _simple_debugging: + +Simple debugging +================ + +The easiest way to run a debugger in a module, either local or remote, is to use `epdb `_. Add ``import epdb; epdb.serve()`` in the module code on the control node at the desired break point. To connect to the debugger, run ``epdb.connect()``. See the `epdb documentation `_ for how to specify the ``host`` and ``port``. If connecting to a remote node, make sure to use a port that is allowed by any firewall between the control node and the remote node. + +This technique should work with any remote debugger, but we do not guarantee any particular remote debugging tool will work. + +The `q `_ library is another very useful debugging tool. + +Since ``print()`` statements do not work inside modules, raising an exception is a good approach if you just want to see some specific data. Put ``raise Exception(some_value)`` somewhere in the module and run it normally. Ansible will handle this exception, pass the message back to the control node, and display it. + diff --git a/docs/docsite/rst/dev_guide/developing_api.rst b/docs/docsite/rst/dev_guide/developing_api.rst new file mode 100644 index 00000000..eeff4684 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_api.rst @@ -0,0 +1,47 @@ +.. _developing_api: + +********** +Python API +********** + +.. contents:: Topics + +.. note:: This API is intended for internal Ansible use. 
Ansible may make changes to this API at any time that could break backward compatibility with older versions of the API. Because of this, external use is not supported by Ansible. If you want to use Python API only for executing playbooks or modules, consider `ansible-runner `_ first. + +There are several ways to use Ansible from an API perspective. You can use +the Ansible Python API to control nodes, you can extend Ansible to respond to various Python events, you can +write plugins, and you can plug in inventory data from external data sources. This document +gives a basic overview and examples of the Ansible execution and playbook API. + +If you would like to use Ansible programmatically from a language other than Python, trigger events asynchronously, +or have access control and logging demands, please see the `Ansible Tower documentation `_. + +.. note:: Because Ansible relies on forking processes, this API is not thread safe. + +.. _python_api_example: + +Python API example +================== + +This example is a simple demonstration that shows how to minimally run a couple of tasks: + +.. literalinclude:: ../../../../examples/scripts/uptime.py + :language: python + +.. note:: Ansible emits warnings and errors via the display object, which prints directly to stdout, stderr and the Ansible log. + +The source code for the ``ansible`` +command line tools (``lib/ansible/cli/``) is `available on GitHub `_. + +.. 
seealso:: + + :ref:`developing_inventory` + Developing dynamic inventory integrations + :ref:`developing_modules_general` + Getting started on developing a module + :ref:`developing_plugins` + How to develop plugins + `Development Mailing List `_ + Mailing list for development topics + `irc.freenode.net `_ + #ansible IRC chat channel diff --git a/docs/docsite/rst/dev_guide/developing_collections.rst b/docs/docsite/rst/dev_guide/developing_collections.rst new file mode 100644 index 00000000..3aa25502 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_collections.rst @@ -0,0 +1,812 @@ + +.. _developing_collections: + +********************** +Developing collections +********************** + +Collections are a distribution format for Ansible content. You can use collections to package and distribute playbooks, roles, modules, and plugins. +You can publish and use collections through `Ansible Galaxy `_. + +* For details on how to *use* collections see :ref:`collections`. +* For the current development status of Collections and FAQ see `Ansible Collections Overview and FAQ `_. + +.. contents:: + :local: + :depth: 2 + +.. _collection_structure: + +Collection structure +==================== + +Collections follow a simple data structure. None of the directories are required unless you have specific content that belongs in one of them. A collection does require a ``galaxy.yml`` file at the root level of the collection. This file contains all of the metadata that Galaxy and other tools need in order to package, build and publish the collection:: + + collection/ + ├── docs/ + ├── galaxy.yml + ├── meta/ + │ └── runtime.yml + ├── plugins/ + │ ├── modules/ + │ │ └── module1.py + │ ├── inventory/ + │ └── .../ + ├── README.md + ├── roles/ + │ ├── role1/ + │ ├── role2/ + │ └── .../ + ├── playbooks/ + │ ├── files/ + │ ├── vars/ + │ ├── templates/ + │ └── tasks/ + └── tests/ + + +.. 
note:: + * Ansible only accepts ``.md`` extensions for the :file:`README` file and any files in the :file:`/docs` folder. + * See the `ansible-collections `_ GitHub Org for examples of collection structure. + * Not all directories are currently in use. Those are placeholders for future features. + +.. _galaxy_yml: + +galaxy.yml +---------- + +A collection must have a ``galaxy.yml`` file that contains the necessary information to build a collection artifact. +See :ref:`collections_galaxy_meta` for details. + +.. _collections_doc_dir: + +docs directory +--------------- + +Put general documentation for the collection here. Keep the specific documentation for plugins and modules embedded as Python docstrings. Use the ``docs`` folder to describe how to use the roles and plugins the collection provides, role requirements, and so on. Use markdown and do not add subfolders. + +Use ``ansible-doc`` to view documentation for plugins inside a collection: + +.. code-block:: bash + + ansible-doc -t lookup my_namespace.my_collection.lookup1 + +The ``ansible-doc`` command requires the fully qualified collection name (FQCN) to display specific plugin documentation. In this example, ``my_namespace`` is the Galaxy namespace and ``my_collection`` is the collection name within that namespace. + +.. note:: The Galaxy namespace of an Ansible collection is defined in the ``galaxy.yml`` file. It can be different from the GitHub organization or repository name. + +.. _collections_plugin_dir: + +plugins directory +------------------ + +Add a 'per plugin type' specific subdirectory here, including ``module_utils`` which is usable not only by modules, but by most plugins by using their FQCN. This is a way to distribute modules, lookups, filters, and so on without having to import a role in every play. + +Vars plugins are unsupported in collections. Cache plugins may be used in collections for fact caching, but are not supported for inventory plugins. + +.. 
_collection_module_utils: + +module_utils +^^^^^^^^^^^^ + +When coding with ``module_utils`` in a collection, the Python ``import`` statement needs to take into account the FQCN along with the ``ansible_collections`` convention. The resulting Python import will look like ``from ansible_collections.{namespace}.{collection}.plugins.module_utils.{util} import {something}`` + +The following example snippets show a Python and PowerShell module using both default Ansible ``module_utils`` and +those provided by a collection. In this example the namespace is ``community``, the collection is ``test_collection``. +In the Python example the ``module_util`` in question is called ``qradar`` such that the FQCN is +``community.test_collection.plugins.module_utils.qradar``: + +.. code-block:: python + + from ansible.module_utils.basic import AnsibleModule + from ansible.module_utils._text import to_text + + from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus + from ansible.module_utils.six.moves.urllib.error import HTTPError + from ansible_collections.community.test_collection.plugins.module_utils.qradar import QRadarRequest + + argspec = dict( + name=dict(required=True, type='str'), + state=dict(choices=['present', 'absent'], required=True), + ) + + module = AnsibleModule( + argument_spec=argspec, + supports_check_mode=True + ) + + qradar_request = QRadarRequest( + module, + headers={"Content-Type": "application/json"}, + not_rest_data_keys=['state'] + ) + +Note that importing something from an ``__init__.py`` file requires using the file name: + +.. code-block:: python + + from ansible_collections.namespace.collection_name.plugins.callback.__init__ import CustomBaseClass + +In the PowerShell example the ``module_util`` in question is called ``hyperv`` such that the FQCN is +``community.test_collection.plugins.module_utils.hyperv``: + +.. 
code-block:: powershell + + #!powershell + #AnsibleRequires -CSharpUtil Ansible.Basic + #AnsibleRequires -PowerShell ansible_collections.community.test_collection.plugins.module_utils.hyperv + + $spec = @{ + name = @{ required = $true; type = "str" } + state = @{ required = $true; choices = @("present", "absent") } + } + $module = [Ansible.Basic.AnsibleModule]::Create($args, $spec) + + Invoke-HyperVFunction -Name $module.Params.name + + $module.ExitJson() + +.. _collections_roles_dir: + +roles directory +---------------- + +Collection roles are mostly the same as existing roles, but with a couple of limitations: + + - Role names are now limited to contain only lowercase alphanumeric characters, plus ``_`` and start with an alpha character. + - Roles in a collection cannot contain plugins any more. Plugins must live in the collection ``plugins`` directory tree. Each plugin is accessible to all roles in the collection. + +The directory name of the role is used as the role name. Therefore, the directory name must comply with the +above role name rules. +The collection import into Galaxy will fail if a role name does not comply with these rules. + +You can migrate 'traditional roles' into a collection but they must follow the rules above. You may need to rename roles if they don't conform. You will have to move or link any role-based plugins to the collection specific directories. + +.. note:: + + For roles imported into Galaxy directly from a GitHub repository, setting the ``role_name`` value in the role's metadata overrides the role name used by Galaxy. For collections, that value is ignored. When importing a collection, Galaxy uses the role directory as the name of the role and ignores the ``role_name`` metadata value. + +playbooks directory +-------------------- + +TBD. + +.. 
_developing_collections_tests_directory: + +tests directory +---------------- + +Ansible Collections are tested much like Ansible itself, by using the +`ansible-test` utility which is released as part of Ansible, version 2.9.0 and +newer. Because Ansible Collections are tested using the same tooling as Ansible +itself, via `ansible-test`, all Ansible developer documentation for testing is +applicable for authoring Collections Tests with one key concept to keep in mind. + +See :ref:`testing_collections` for specific information on how to test collections +with ``ansible-test``. + +When reading the :ref:`developing_testing` documentation, there will be content +that applies to running Ansible from source code via a git clone, which is +typical of an Ansible developer. However, it's not always typical for an Ansible +Collection author to be running Ansible from source but instead from a stable +release, and to create Collections it is not necessary to run Ansible from +source. Therefore, when references of dealing with `ansible-test` binary paths, +command completion, or environment variables are presented throughout the +:ref:`developing_testing` documentation; keep in mind that it is not needed for +Ansible Collection Testing because the act of installing the stable release of +Ansible containing `ansible-test` is expected to set up those things for you. + +.. _meta_runtime_yml: + +meta directory +-------------- + +A collection can store some additional metadata in a ``runtime.yml`` file in the collection's ``meta`` directory. The ``runtime.yml`` file supports the top level keys: + +- *requires_ansible*: + + The version of Ansible required to use the collection. Multiple versions can be separated with a comma. + + .. code:: yaml + + requires_ansible: ">=2.10,<2.11" + + .. note:: although the version is a `PEP440 Version Specifier `_ under the hood, Ansible deviates from PEP440 behavior by truncating prerelease segments from the Ansible version. 
This means that Ansible 2.11.0b1 is compatible with something that ``requires_ansible: ">=2.11"``. + +- *plugin_routing*: + + Content in a collection that Ansible needs to load from another location or that has been deprecated/removed. + The top level keys of ``plugin_routing`` are types of plugins, with individual plugin names as subkeys. + To define a new location for a plugin, set the ``redirect`` field to another name. + To deprecate a plugin, use the ``deprecation`` field to provide a custom warning message and the removal version or date. If the plugin has been renamed or moved to a new location, the ``redirect`` field should also be provided. If a plugin is being removed entirely, ``tombstone`` can be used for the fatal error message and removal version or date. + + .. code:: yaml + + plugin_routing: + inventory: + kubevirt: + redirect: community.general.kubevirt + my_inventory: + tombstone: + removal_version: "2.0.0" + warning_text: my_inventory has been removed. Please use other_inventory instead. + modules: + my_module: + deprecation: + removal_date: "2021-11-30" + warning_text: my_module will be removed in a future release of this collection. Use another.collection.new_module instead. + redirect: another.collection.new_module + podman_image: + redirect: containers.podman.podman_image + module_utils: + ec2: + redirect: amazon.aws.ec2 + util_dir.subdir.my_util: + redirect: namespace.name.my_util + +- *import_redirection* + + A mapping of names for Python import statements and their redirected locations. + + .. code:: yaml + + import_redirection: + ansible.module_utils.old_utility: + redirect: ansible_collections.namespace_name.collection_name.plugins.module_utils.new_location + + +.. _creating_collections_skeleton: + +Creating a collection skeleton +------------------------------ + +To start a new collection: + +.. code-block:: bash + + collection_dir#> ansible-galaxy collection init my_namespace.my_collection + +.. 
note:: + + Both the namespace and collection names use the same strict set of requirements. See `Galaxy namespaces `_ on the Galaxy docsite for those requirements. + +Once the skeleton exists, you can populate the directories with the content you want inside the collection. See `ansible-collections `_ GitHub Org to get a better idea of what you can place inside a collection. + +.. _creating_collections: + +Creating collections +====================== + +To create a collection: + +#. Create a collection skeleton with the ``collection init`` command. See :ref:`creating_collections_skeleton` above. +#. Add your content to the collection. +#. Build the collection into a collection artifact with :ref:`ansible-galaxy collection build`. +#. Publish the collection artifact to Galaxy with :ref:`ansible-galaxy collection publish`. + +A user can then install your collection on their systems. + +Currently the ``ansible-galaxy collection`` command implements the following sub commands: + +* ``init``: Create a basic collection skeleton based on the default template included with Ansible or your own template. +* ``build``: Create a collection artifact that can be uploaded to Galaxy or your own repository. +* ``publish``: Publish a built collection artifact to Galaxy. +* ``install``: Install one or more collections. + +To learn more about the ``ansible-galaxy`` command-line tool, see the :ref:`ansible-galaxy` man page. + + +.. _docfragments_collections: + +Using documentation fragments in collections +-------------------------------------------- + +To include documentation fragments in your collection: + +#. Create the documentation fragment: ``plugins/doc_fragments/fragment_name``. + +#. Refer to the documentation fragment with its FQCN. + +.. 
code-block:: yaml + + extends_documentation_fragment: + - community.kubernetes.k8s_name_options + - community.kubernetes.k8s_auth_options + - community.kubernetes.k8s_resource_options + - community.kubernetes.k8s_scale_options + +:ref:`module_docs_fragments` covers the basics for documentation fragments. The `kubernetes `_ collection includes a complete example. + +You can also share documentation fragments across collections with the FQCN. + +.. _building_collections: + +Building collections +-------------------- + +To build a collection, run ``ansible-galaxy collection build`` from inside the root directory of the collection: + +.. code-block:: bash + + collection_dir#> ansible-galaxy collection build + +This creates a tarball of the built collection in the current directory which can be uploaded to Galaxy.:: + + my_collection/ + ├── galaxy.yml + ├── ... + ├── my_namespace-my_collection-1.0.0.tar.gz + └── ... + +.. note:: + * Certain files and folders are excluded when building the collection artifact. See :ref:`ignoring_files_and_folders_collections` to exclude other files you would not want to distribute. + * If you used the now-deprecated ``Mazer`` tool for any of your collections, delete any and all files it added to your :file:`releases/` directory before you build your collection with ``ansible-galaxy``. + * The current Galaxy maximum tarball size is 2 MB. + + +This tarball is mainly intended to upload to Galaxy +as a distribution method, but you can use it directly to install the collection on target systems. + +.. 
_ignoring_files_and_folders_collections: + +Ignoring files and folders +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +By default the build step will include all the files in the collection directory in the final build artifact except for the following: + +* ``galaxy.yml`` +* ``*.pyc`` +* ``*.retry`` +* ``tests/output`` +* previously built artifacts in the root directory +* various version control directories like ``.git/`` + +To exclude other files and folders when building the collection, you can set a list of file glob-like patterns in the +``build_ignore`` key in the collection's ``galaxy.yml`` file. These patterns use the following special characters for +wildcard matching: + +* ``*``: Matches everything +* ``?``: Matches any single character +* ``[seq]``: Matches any character in seq +* ``[!seq]``: Matches any character not in seq + +For example, if you wanted to exclude the :file:`sensitive` folder within the ``playbooks`` folder as well as any ``.tar.gz`` archives you +can set the following in your ``galaxy.yml`` file: + +.. code-block:: yaml + + build_ignore: + - playbooks/sensitive + - '*.tar.gz' + +.. note:: + This feature is only supported when running ``ansible-galaxy collection build`` with Ansible 2.10 or newer. + + +.. _trying_collection_locally: + +Trying collections locally +-------------------------- + +You can try your collection locally by installing it from the tarball. The following will enable an adjacent playbook to +access the collection: + +.. code-block:: bash + + ansible-galaxy collection install my_namespace-my_collection-1.0.0.tar.gz -p ./collections + + +You should use one of the values configured in :ref:`COLLECTIONS_PATHS` for your path. This is also where Ansible itself will +expect to find collections when attempting to use them. If you don't specify a path value, ``ansible-galaxy collection install`` +installs the collection in the first path defined in :ref:`COLLECTIONS_PATHS`, which by default is ``~/.ansible/collections``. 
+ +Next, try using the local collection inside a playbook. For examples and more details see :ref:`Using collections ` + +.. _collections_scm_install: + +Installing collections from a git repository +-------------------------------------------- + +You can also test a version of your collection in development by installing it from a git repository. + +.. code-block:: bash + + ansible-galaxy collection install git+https://github.com/org/repo.git,devel + +.. include:: ../shared_snippets/installing_collections_git_repo.txt + +.. _publishing_collections: + +Publishing collections +---------------------- + +You can publish collections to Galaxy using the ``ansible-galaxy collection publish`` command or the Galaxy UI itself. You need a namespace on Galaxy to upload your collection. See `Galaxy namespaces `_ on the Galaxy docsite for details. + +.. note:: Once you upload a version of a collection, you cannot delete or modify that version. Ensure that everything looks okay before you upload it. + +.. _galaxy_get_token: + +Getting your API token +^^^^^^^^^^^^^^^^^^^^^^ + +To upload your collection to Galaxy, you must first obtain an API token (``--token`` in the ``ansible-galaxy`` CLI command or ``token`` in the :file:`ansible.cfg` file under the ``galaxy_server`` section). The API token is a secret token used to protect your content. + +To get your API token: + +* For Galaxy, go to the `Galaxy profile preferences `_ page and click :guilabel:`API Key`. +* For Automation Hub, go to https://cloud.redhat.com/ansible/automation-hub/token/ and click :guilabel:`Load token` from the version dropdown. + +Storing or using your API token +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Once you have retrieved your API token, you can store or use the token for collections in two ways: + +* Pass the token to the ``ansible-galaxy`` command using the ``--token``. +* Specify the token within a Galaxy server list in your :file:`ansible.cfg` file. 
+ +Using the ``token`` argument +............................ + +You can use the ``--token`` argument with the ``ansible-galaxy`` command (in conjunction with the ``--server`` argument or :ref:`GALAXY_SERVER` setting in your :file:`ansible.cfg` file). You cannot use ``apt-key`` with any servers defined in your :ref:`Galaxy server list `. + +.. code-block:: text + + ansible-galaxy collection publish ./geerlingguy-collection-1.2.3.tar.gz --token= + + +Specify the token within a Galaxy server list +............................................. + +With this option, you configure one or more servers for Galaxy in your :file:`ansible.cfg` file under the ``galaxy_server_list`` section. For each server, you also configure the token. + + +.. code-block:: ini + + [galaxy] + server_list = release_galaxy + + [galaxy_server.release_galaxy] + url=https://galaxy.ansible.com/ + token=my_token + +See :ref:`galaxy_server_config` for complete details. + +.. _upload_collection_ansible_galaxy: + +Upload using ansible-galaxy +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. note:: + By default, ``ansible-galaxy`` uses https://galaxy.ansible.com as the Galaxy server (as listed in the :file:`ansible.cfg` file under :ref:`galaxy_server`). If you are only publishing your collection to Ansible Galaxy, you do not need any further configuration. If you are using Red Hat Automation Hub or any other Galaxy server, see :ref:`Configuring the ansible-galaxy client `. + +To upload the collection artifact with the ``ansible-galaxy`` command: + +.. code-block:: bash + + ansible-galaxy collection publish path/to/my_namespace-my_collection-1.0.0.tar.gz + +.. note:: + + The above command assumes you have retrieved and stored your API token as part of a Galaxy server list. See :ref:`galaxy_get_token` for details. + +The ``ansible-galaxy collection publish`` command triggers an import process, just as if you uploaded the collection through the Galaxy website. 
+The command waits until the import process completes before reporting the status back. If you want to continue +without waiting for the import result, use the ``--no-wait`` argument and manually look at the import progress in your +`My Imports `_ page. + + +.. _upload_collection_galaxy: + +Upload a collection from the Galaxy website +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To upload your collection artifact directly on Galaxy: + +#. Go to the `My Content `_ page, and click the **Add Content** button on one of your namespaces. +#. From the **Add Content** dialogue, click **Upload New Collection**, and select the collection archive file from your local filesystem. + +When uploading collections it doesn't matter which namespace you select. The collection will be uploaded to the +namespace specified in the collection metadata in the ``galaxy.yml`` file. If you're not an owner of the +namespace, the upload request will fail. + +Once Galaxy uploads and accepts a collection, you will be redirected to the **My Imports** page, which displays output from the +import process, including any errors or warnings about the metadata and content contained in the collection. + +.. _collection_versions: + +Collection versions +------------------- + +Once you upload a version of a collection, you cannot delete or modify that version. Ensure that everything looks okay before +uploading. The only way to change a collection is to release a new version. The latest version of a collection (by highest version number) +will be the version displayed everywhere in Galaxy; however, users will still be able to download older versions. + +Collection versions use `Semantic Versioning `_ for version numbers. Please read the official documentation for details and examples. In summary: + +* Increment major (for example: x in `x.y.z`) version number for an incompatible API change. 
+ +* Increment minor (for example: y in `x.y.z`) version number for new functionality in a backwards compatible manner (for example new modules/plugins, parameters, return values). +* Increment patch (for example: z in `x.y.z`) version number for backwards compatible bug fixes. + +.. _migrate_to_collection: + +Migrating Ansible content to a different collection +==================================================== + +First, look at `Ansible Collection Checklist `_. + +To migrate content from one collection to another, if the collections are parts of `Ansible distribution `_: + +#. Copy content from the source (old) collection to the target (new) collection. +#. Deprecate the module/plugin with ``removal_version`` scheduled for the next major version in ``meta/runtime.yml`` of the old collection. The deprecation must be released after the copied content has been included in a release of the new collection. +#. When the next major release of the old collection is prepared: + + * remove the module/plugin from the old collection + * remove the symlink stored in ``plugins/modules`` directory if appropriate (mainly when removing from ``community.general`` and ``community.network``) + * remove related unit and integration tests + * remove specific module utils + * remove specific documentation fragments if there are any in the old collection + * add a changelog fragment containing entries for ``removed_features`` and ``breaking_changes``; you can see an example of a changelog fragment in this `pull request `_ + * change ``meta/runtime.yml`` in the old collection: + + * add ``redirect`` to the corresponding module/plugin's entry + * in particular, add ``redirect`` for the removed module utils and documentation fragments if applicable + * remove ``removal_version`` from there + * remove related entries from ``tests/sanity/ignore.txt`` files if they exist + * remove changelog fragments for removed content that are not yet part of the changelog (in other words, do not modify 
`changelogs/changelog.yaml` and do not delete files mentioned in it) + * remove requirements that are no longer required in ``tests/unit/requirements.txt``, ``tests/requirements.yml`` and ``galaxy.yml`` + +According to the above, you need to create at least three PRs as follows: + +#. Create a PR against the new collection to copy the content. +#. Deprecate the module/plugin in the old collection. +#. Later create a PR against the old collection to remove the content according to the schedule. + + +Adding the content to the new collection +---------------------------------------- + +Create a PR in the new collection to: + +#. Copy ALL the related files from the old collection. +#. If it is an action plugin, include the corresponding module with documentation. +#. If it is a module, check if it has a corresponding action plugin that should move with it. +#. Check ``meta/`` for relevant updates to ``runtime.yml`` if it exists. +#. Carefully check the moved ``tests/integration`` and ``tests/unit`` and update for FQCN. +#. Review ``tests/sanity/ignore-*.txt`` entries in the old collection. +#. Update ``meta/runtime.yml`` in the old collection. + + +Removing the content from the old collection +-------------------------------------------- + +Create a PR against the source collection repository to remove the modules, module_utils, plugins, and docs_fragments related to this migration: + +#. If you are removing an action plugin, remove the corresponding module that contains the documentation. +#. If you are removing a module, remove any corresponding action plugin that should stay with it. +#. Remove any entries about removed plugins from ``meta/runtime.yml``. Ensure they are added into the new repo. +#. Remove sanity ignore lines from ``tests/sanity/ignore-*.txt`` +#. Remove associated integration tests from ``tests/integration/targets/`` and unit tests from ``tests/unit/plugins/``. +#. 
If you are removing content from ``community.general`` or ``community.network``, remove entries from ``.github/BOTMETA.yml``. +#. Carefully review ``meta/runtime.yml`` for any entries you may need to remove or update, in particular deprecated entries. +#. Update ``meta/runtime.yml`` to contain redirects for EVERY PLUGIN, pointing to the new collection name. + +.. warning:: + + Maintainers for the old collection have to make sure that the PR is merged in a way that it does not break user experience and semantic versioning: + + #. A new version containing the merged PR must not be released before the collection the content has been moved to has been released again, with that content contained in it. Otherwise the redirects cannot work and users relying on that content will experience breakage. + #. Once 1.0.0 of the collection from which the content has been removed has been released, such PRs can only be merged for a new **major** version (in other words, 2.0.0, 3.0.0, and so on). + + +BOTMETA.yml +----------- + +The ``BOTMETA.yml``, for example in `community.general collection repository `_, is the source of truth for: + +* ansibullbot + +If the old and/or new collection has ``ansibullbot``, its ``BOTMETA.yml`` must be updated correspondingly. + +Ansibullbot will know how to redirect existing issues and PRs to the new repo. +The build process for docs.ansible.com will know where to find the module docs. + +.. 
code-block:: yaml + + $modules/monitoring/grafana/grafana_plugin.py: + migrated_to: community.grafana + $modules/monitoring/grafana/grafana_dashboard.py: + migrated_to: community.grafana + $modules/monitoring/grafana/grafana_datasource.py: + migrated_to: community.grafana + $plugins/callback/grafana_annotations.py: + maintainers: $team_grafana + labels: monitoring grafana + migrated_to: community.grafana + $plugins/doc_fragments/grafana.py: + maintainers: $team_grafana + labels: monitoring grafana + migrated_to: community.grafana + +`Example PR `_ + +* The ``migrated_to:`` key must be added explicitly for every *file*. You cannot add ``migrated_to`` at the directory level. This is to allow module and plugin webdocs to be redirected to the new collection docs. +* ``migrated_to:`` MUST be added for every: + + * module + * plugin + * module_utils + * contrib/inventory script + +* You do NOT need to add ``migrated_to`` for: + + * Unit tests + * Integration tests + * ReStructured Text docs (anything under ``docs/docsite/rst/``) + * Files that never existed in ``ansible/ansible:devel`` + +.. _testing_collections: + +Testing collections +=================== + +The main tool for testing collections is ``ansible-test``, Ansible's testing tool described in :ref:`developing_testing`. You can run several compile and sanity checks, as well as run unit and integration tests for plugins using ``ansible-test``. When you test collections, test against the ansible-base version(s) you are targeting. + +You must always execute ``ansible-test`` from the root directory of a collection. You can run ``ansible-test`` in Docker containers without installing any special requirements. The Ansible team uses this approach in Shippable both in the ansible/ansible GitHub repository and in the large community collections such as `community.general `_ and `community.network `_. The examples below demonstrate running tests in Docker containers. 
+ +Compile and sanity tests +------------------------ + +To run all compile and sanity tests:: + + ansible-test sanity --docker default -v + +See :ref:`testing_compile` and :ref:`testing_sanity` for more information. See the :ref:`full list of sanity tests ` for details on the sanity tests and how to fix identified issues. + +Unit tests +---------- + +You must place unit tests in the appropriate ``tests/unit/plugins/`` directory. For example, you would place tests for ``plugins/module_utils/foo/bar.py`` in ``tests/unit/plugins/module_utils/foo/test_bar.py`` or ``tests/unit/plugins/module_utils/foo/bar/test_bar.py``. For examples, see the `unit tests in community.general `_. + +To run all unit tests for all supported Python versions:: + + ansible-test units --docker default -v + +To run all unit tests only for a specific Python version:: + + ansible-test units --docker default -v --python 3.6 + +To run only a specific unit test:: + + ansible-test units --docker default -v --python 3.6 tests/unit/plugins/module_utils/foo/test_bar.py + +You can specify Python requirements in the ``tests/unit/requirements.txt`` file. See :ref:`testing_units` for more information, especially on fixture files. + +Integration tests +----------------- + +You must place integration tests in the appropriate ``tests/integration/targets/`` directory. For module integration tests, you can use the module name alone. For example, you would place integration tests for ``plugins/modules/foo.py`` in a directory called ``tests/integration/targets/foo/``. For non-module plugin integration tests, you must add the plugin type to the directory name. For example, you would place integration tests for ``plugins/connection/bar.py`` in a directory called ``tests/integration/targets/connection_bar/``. For lookup plugins, the directory must be called ``lookup_foo``, for inventory plugins, ``inventory_foo``, and so on. 
+ +You can write two different kinds of integration tests: + +* Ansible role tests run with ``ansible-playbook`` and validate various aspects of the module. They can depend on other integration tests (usually named ``prepare_bar`` or ``setup_bar``, which prepare a service or install a requirement named ``bar`` in order to test module ``foo``) to set up required resources, such as installing required libraries or setting up server services. +* ``runme.sh`` tests run directly as scripts. They can set up inventory files, and execute ``ansible-playbook`` or ``ansible-inventory`` with various settings. + +For examples, see the `integration tests in community.general `_. See also :ref:`testing_integration` for more details. + +Since integration tests can install requirements, and set up, start and stop services, we recommend running them in docker containers or otherwise restricted environments whenever possible. By default, ``ansible-test`` supports Docker images for several operating systems. See the `list of supported docker images `_ for all options. Use the ``default`` image mainly for platform-independent integration tests, such as those for cloud modules. The following examples use the ``centos8`` image. + +To execute all integration tests for a collection:: + + ansible-test integration --docker centos8 -v + +If you want more detailed output, run the command with ``-vvv`` instead of ``-v``. Alternatively, specify ``--retry-on-error`` to automatically re-run failed tests with higher verbosity levels. + +To execute only the integration tests in a specific directory:: + + ansible-test integration --docker centos8 -v connection_bar + +You can specify multiple target names. Each target name is the name of a directory in ``tests/integration/targets/``. + +.. 
_hacking_collections: + +Contributing to collections +=========================== + +If you want to add functionality to an existing collection, modify a collection you are using to fix a bug, or change the behavior of a module in a collection, clone the git repository for that collection and make changes on a branch. You can combine changes to a collection with a local checkout of Ansible (``source hacking/env-setup``). + +This section describes the process for `community.general `_. To contribute to other collections, replace the folder names ``community`` and ``general`` with the namespace and collection name of a different collection. + +We assume that you have included ``~/dev/ansible/collections/`` in :ref:`COLLECTIONS_PATHS`, and if that path mentions multiple directories, that you made sure that no other directory earlier in the search path contains a copy of ``community.general``. Create the directory ``~/dev/ansible/collections/ansible_collections/community``, and in it clone `the community.general Git repository `_ or a fork of it into the folder ``general``:: + + mkdir -p ~/dev/ansible/collections/ansible_collections/community + cd ~/dev/ansible/collections/ansible_collections/community + git clone git@github.com:ansible-collections/community.general.git general + +If you clone a fork, add the original repository as a remote ``upstream``:: + + cd ~/dev/ansible/collections/ansible_collections/community/general + git remote add upstream git@github.com:ansible-collections/community.general.git + +Now you can use this checkout of ``community.general`` in playbooks and roles with whichever version of Ansible you have installed locally, including a local checkout of ``ansible/ansible``'s ``devel`` branch. + +For collections hosted in the ``ansible_collections`` GitHub org, create a branch and commit your changes on the branch. 
When you are done (remember to add tests, see :ref:`testing_collections`), push your changes to your fork of the collection and create a Pull Request. For other collections, especially for collections not hosted on GitHub, check the ``README.md`` of the collection for information on contributing to it. + +.. _collection_changelogs: + +Generating changelogs for a collection +====================================== + +We recommend that you use the `antsibull-changelog `_ tool to generate Ansible-compatible changelogs for your collection. The Ansible changelog uses the output of this tool to collate all the collections included in an Ansible release into one combined changelog for the release. + +.. note:: + + Ansible here refers to the Ansible 2.10 or later release that includes a curated set of collections. + +Understanding antsibull-changelog +--------------------------------- + +The ``antsibull-changelog`` tool allows you to create and update changelogs for Ansible collections that are compatible with the combined Ansible changelogs. This is an update to the changelog generator used in prior Ansible releases. The tool adds three new changelog fragment categories: ``breaking_changes``, ``security_fixes`` and ``trivial``. The tool also generates the ``changelog.yaml`` file that Ansible uses to create the combined ``CHANGELOG.rst`` file and Porting Guide for the release. + +See :ref:`changelogs_how_to` and the `antsibull-changelog documentation `_ for complete details. + +.. note:: + + The collection maintainers set the changelog policy for their collections. See the individual collection contributing guidelines for complete details. + +Generating changelogs +--------------------- + +To initialize changelog generation: + +#. Install ``antsibull-changelog``: :code:`pip install antsibull-changelog`. +#. Initialize changelogs for your repository: :code:`antsibull-changelog init `. +#. 
Optionally, edit the ``changelogs/config.yaml`` file to customize the location of the generated changelog ``.rst`` file or other options. See `Bootstrapping changelogs for collections `_ for details. + +To generate changelogs from the changelog fragments you created: + +#. Optionally, validate your changelog fragments: :code:`antsibull-changelog lint`. +#. Generate the changelog for your release: :code:`antsibull-changelog release [--version version_number]`. + +.. note:: + + Add the ``--reload-plugins`` option if you ran the ``antsibull-changelog release`` command previously and the version of the collection has not changed. ``antsibull-changelog`` caches the information on all plugins and does not update its cache until the collection version changes. + + +Porting Guide entries +---------------------- + +The following changelog fragment categories are consumed by the Ansible changelog generator into the Ansible Porting Guide: + +* ``major_changes`` +* ``breaking_changes`` +* ``deprecated_features`` +* ``removed_features`` + +Including collection changelogs into Ansible +============================================= + + +If your collection is part of Ansible, use one of the following three options to include your changelog into the Ansible release changelog: + +* Use the ``antsibull-changelog`` tool. + +* If you are not using this tool, include the properly formatted ``changelog.yaml`` file into your collection. See the `changelog.yaml format `_ for details. + +* Add a link to your own changelogs or release notes in any format by opening an issue at https://github.com/ansible-community/ansible-build-data/ with the HTML link to that information. + +.. note:: + + For the first two options, Ansible pulls the changelog details from Galaxy so your changelogs must be included in the collection version on Galaxy that is included in the upcoming Ansible release. + +.. seealso:: + + :ref:`collections` + Learn how to install and use collections. 
+ :ref:`collections_galaxy_meta` + Understand the collections metadata structure. + :ref:`developing_modules_general` + Learn about how to write Ansible modules + `Mailing List `_ + The development mailing list + `irc.freenode.net `_ + #ansible IRC chat channel diff --git a/docs/docsite/rst/dev_guide/developing_core.rst b/docs/docsite/rst/dev_guide/developing_core.rst new file mode 100644 index 00000000..602f9aaf --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_core.rst @@ -0,0 +1,21 @@ +*************************** +Developing ``ansible-base`` +*************************** + +Although ``ansible-base`` (the code hosted in the `ansible/ansible repository `_ on GitHub) includes a few plugins that can be swapped out via playbook directives or configuration, much of the code there is not modular. The documents here give insight into how the parts of ``ansible-base`` work together. + +.. toctree:: + :maxdepth: 1 + + developing_program_flow_modules + +.. seealso:: + + :ref:`developing_api` + Learn about the Python API for task execution + :ref:`developing_plugins` + Learn about developing plugins + `Mailing List `_ + The development mailing list + `irc.freenode.net `_ + #ansible-devel IRC chat channel diff --git a/docs/docsite/rst/dev_guide/developing_inventory.rst b/docs/docsite/rst/dev_guide/developing_inventory.rst new file mode 100644 index 00000000..26a56a36 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_inventory.rst @@ -0,0 +1,422 @@ +.. _developing_inventory: + +**************************** +Developing dynamic inventory +**************************** + +Ansible can pull inventory information from dynamic sources, including cloud sources, by using the supplied :ref:`inventory plugins `. For details about how to pull inventory information, see :ref:`dynamic_inventory`. If the source you want is not currently covered by existing plugins, you can create your own inventory plugin as with any other plugin type. 
+ +In previous versions, you had to create a script or program that could output JSON in the correct format when invoked with the proper arguments. +You can still use and write inventory scripts, as we ensured backwards compatibility via the :ref:`script inventory plugin ` +and there is no restriction on the programming language used. +If you choose to write a script, however, you will need to implement some features yourself such as caching, configuration management, dynamic variable and group composition, and so on. +If you use :ref:`inventory plugins ` instead, you can leverage the Ansible codebase and add these common features automatically. + +.. contents:: Topics + :local: + + +.. _inventory_sources: + +Inventory sources +================= + +Inventory sources are the input strings that inventory plugins work with. +An inventory source can be a path to a file or to a script, or it can be raw data that the plugin can interpret. + +The table below shows some examples of inventory plugins and the source types that you can pass to them with ``-i`` on the command line. 
+ ++--------------------------------------------+-----------------------------------------+ +| Plugin | Source | ++--------------------------------------------+-----------------------------------------+ +| :ref:`host list ` | A comma-separated list of hosts | ++--------------------------------------------+-----------------------------------------+ +| :ref:`yaml ` | Path to a YAML format data file | ++--------------------------------------------+-----------------------------------------+ +| :ref:`constructed ` | Path to a YAML configuration file | ++--------------------------------------------+-----------------------------------------+ +| :ref:`ini ` | Path to an INI formatted data file | ++--------------------------------------------+-----------------------------------------+ +| :ref:`virtualbox ` | Path to a YAML configuration file | ++--------------------------------------------+-----------------------------------------+ +| :ref:`script plugin ` | Path to an executable that outputs JSON | ++--------------------------------------------+-----------------------------------------+ + + +.. _developing_inventory_inventory_plugins: + +Inventory plugins +================= + +Like most plugin types (except modules), inventory plugins must be developed in Python. They execute on the controller and should therefore adhere to the :ref:`control_node_requirements`. + +Most of the documentation in :ref:`developing_plugins` also applies here. You should read that document first for a general understanding and then come back to this document for specifics on inventory plugins. + +Normally, inventory plugins are executed at the start of a run, and before the playbooks, plays, or roles are loaded. +However, you can use the ``meta: refresh_inventory`` task to clear the current inventory and execute the inventory plugins again, and this task will generate a new inventory. 
+ +If you use the persistent cache, inventory plugins can also use the configured cache plugin to store and retrieve data. Caching inventory avoids making repeated and costly external calls. + +.. _developing_an_inventory_plugin: + +Developing an inventory plugin +------------------------------ + +The first thing you want to do is use the base class: + +.. code-block:: python + + from ansible.plugins.inventory import BaseInventoryPlugin + + class InventoryModule(BaseInventoryPlugin): + + NAME = 'myplugin' # used internally by Ansible, it should match the file name but not required + +If the inventory plugin is in a collection, the NAME should be in the 'namespace.collection_name.myplugin' format. The base class has a couple of methods that each plugin should implement and a few helpers for parsing the inventory source and updating the inventory. + +After you have the basic plugin working, you can incorporate other features by adding more base classes: + +.. code-block:: python + + from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable + + class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable): + + NAME = 'myplugin' + +For the bulk of the work in a plugin, we mostly want to deal with 2 methods ``verify_file`` and ``parse``. + +.. _inventory_plugin_verify_file: + +verify_file method +^^^^^^^^^^^^^^^^^^ + +Ansible uses this method to quickly determine if the inventory source is usable by the plugin. The determination does not need to be 100% accurate, as there might be an overlap in what plugins can handle and by default Ansible will try the enabled plugins as per their sequence. + +.. 
code-block:: python + + def verify_file(self, path): + ''' return true/false if this is possibly a valid file for this plugin to consume ''' + valid = False + if super(InventoryModule, self).verify_file(path): + # base class verifies that file exists and is readable by current user + if path.endswith(('virtualbox.yaml', 'virtualbox.yml', 'vbox.yaml', 'vbox.yml')): + valid = True + return valid + +In the above example, from the :ref:`virtualbox inventory plugin <virtualbox_inventory>`, we screen for specific file name patterns to avoid attempting to consume any valid YAML file. You can add any type of condition here, but the most common one is 'extension matching'. If you implement extension matching for YAML configuration files, the path suffix <plugin_name>.<yml|yaml> should be accepted. All valid extensions should be documented in the plugin description. + +The following is another example that does not use a 'file' but the inventory source string itself, +from the :ref:`host list <host_list_inventory>` plugin: + +.. code-block:: python + + def verify_file(self, path): + ''' don't call base class as we don't expect a path, but a host list ''' + host_list = path + valid = False + b_path = to_bytes(host_list, errors='surrogate_or_strict') + if not os.path.exists(b_path) and ',' in host_list: + # the path does NOT exist and there is a comma to indicate this is a 'host list' + valid = True + return valid + +This method is just to expedite the inventory process and avoid unnecessary parsing of sources that are easy to filter out before causing a parse error. + +.. _inventory_plugin_parse: + +parse method +^^^^^^^^^^^^ + +This method does the bulk of the work in the plugin. +It takes the following parameters: + + * inventory: inventory object with existing data and the methods to add hosts/groups/variables to inventory + * loader: Ansible's DataLoader. The DataLoader can read files, auto load JSON/YAML and decrypt vaulted data, and cache read files.
+ * path: string with inventory source (this is usually a path, but is not required) + * cache: indicates whether the plugin should use or avoid caches (cache plugin and/or loader) + + +The base class does some minimal assignment for reuse in other methods. + +.. code-block:: python + + def parse(self, inventory, loader, path, cache=True): + + self.loader = loader + self.inventory = inventory + self.templar = Templar(loader=loader) + +It is up to the plugin now to parse the provided inventory source and translate it into Ansible inventory. +To facilitate this, the example below uses a few helper functions: + +.. code-block:: python + + NAME = 'myplugin' + + def parse(self, inventory, loader, path, cache=True): + + # call base method to ensure properties are available for use with other helper methods + super(InventoryModule, self).parse(inventory, loader, path, cache) + + # this method will parse 'common format' inventory sources and + # update any options declared in DOCUMENTATION as needed + config = self._read_config_data(path) + + # if NOT using _read_config_data you should call set_options directly, + # to process any defined configuration for this plugin, + # if you don't define any options you can skip + #self.set_options() + + # example consuming options from inventory source + mysession = apilib.session(user=self.get_option('api_user'), + password=self.get_option('api_pass'), + server=self.get_option('api_server') + ) + + + # make requests to get data to feed into inventory + mydata = mysession.getitall() + + #parse data and create inventory objects: + for colo in mydata: + for server in mydata[colo]['servers']: + self.inventory.add_host(server['name']) + self.inventory.set_variable(server['name'], 'ansible_host', server['external_ip']) + +The specifics will vary depending on API and structure returned. 
Remember that if you get an inventory source error or any other issue, you should ``raise AnsibleParserError`` to let Ansible know that the source was invalid or the process failed. + +For examples on how to implement an inventory plugin, see the source code here: +`lib/ansible/plugins/inventory `_. + +.. _inventory_plugin_caching: + +inventory cache +^^^^^^^^^^^^^^^ + +To cache the inventory, extend the inventory plugin documentation with the inventory_cache documentation fragment and use the Cacheable base class. + +.. code-block:: yaml + + extends_documentation_fragment: + - inventory_cache + +.. code-block:: python + + class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable): + + NAME = 'myplugin' + +Next, load the cache plugin specified by the user to read from and update the cache. If your inventory plugin uses YAML-based configuration files and the ``_read_config_data`` method, the cache plugin is loaded within that method. If your inventory plugin does not use ``_read_config_data``, you must load the cache explicitly with ``load_cache_plugin``. + +.. code-block:: python + + NAME = 'myplugin' + + def parse(self, inventory, loader, path, cache=True): + super(InventoryModule, self).parse(inventory, loader, path) + + self.load_cache_plugin() + +Before using the cache plugin, you must retrieve a unique cache key by using the ``get_cache_key`` method. This task needs to be done by all inventory modules using the cache, so that you don't use/overwrite other parts of the cache. + +.. code-block:: python + + def parse(self, inventory, loader, path, cache=True): + super(InventoryModule, self).parse(inventory, loader, path) + + self.load_cache_plugin() + cache_key = self.get_cache_key(path) + +Now that you've enabled caching, loaded the correct plugin, and retrieved a unique cache key, you can set up the flow of data between the cache and your inventory using the ``cache`` parameter of the ``parse`` method. 
This value comes from the inventory manager and indicates whether the inventory is being refreshed (such as via ``--flush-cache`` or the meta task ``refresh_inventory``). Although the cache shouldn't be used to populate the inventory when being refreshed, the cache should be updated with the new inventory if the user has enabled caching. You can use ``self._cache`` like a dictionary. The following pattern allows refreshing the inventory to work in conjunction with caching. + +.. code-block:: python + + def parse(self, inventory, loader, path, cache=True): + super(InventoryModule, self).parse(inventory, loader, path) + + self.load_cache_plugin() + cache_key = self.get_cache_key(path) + + # cache may be True or False at this point to indicate if the inventory is being refreshed + # get the user's cache option too to see if we should save the cache if it is changing + user_cache_setting = self.get_option('cache') + + # read if the user has caching enabled and the cache isn't being refreshed + attempt_to_read_cache = user_cache_setting and cache + # update if the user has caching enabled and the cache is being refreshed; update this value to True if the cache has expired below + cache_needs_update = user_cache_setting and not cache + + # attempt to read the cache if inventory isn't being refreshed and the user has caching enabled + if attempt_to_read_cache: + try: + results = self._cache[cache_key] + except KeyError: + # This occurs if the cache_key is not in the cache or if the cache_key expired, so the cache needs to be updated + cache_needs_update = True + + if cache_needs_update: + results = self.get_inventory() + + # set the cache + self._cache[cache_key] = results + + self.populate(results) + +After the ``parse`` method is complete, the contents of ``self._cache`` is used to set the cache plugin if the contents of the cache have changed. 
+ +You have three other cache methods available: + - ``set_cache_plugin`` forces the cache plugin to be set with the contents of ``self._cache``, before the ``parse`` method completes + - ``update_cache_if_changed`` sets the cache plugin only if ``self._cache`` has been modified, before the ``parse`` method completes + - ``clear_cache`` flushes the cache, ultimately by calling the cache plugin's ``flush()`` method, whose implementation is dependent upon the particular cache plugin in use. Note that if the user is using the same cache backend for facts and inventory, both will get flushed. To avoid this, the user can specify a distinct cache backend in their inventory plugin configuration. + +.. _inventory_source_common_format: + +Common format for inventory sources +----------------------------------- + +To simplify development, most plugins use a standard YAML-based configuration file as the inventory source. The file has only one required field ``plugin``, which should contain the name of the plugin that is expected to consume the file. +Depending on other common features used, you might need other fields, and you can add custom options in each plugin as required. +For example, if you use the integrated caching, ``cache_plugin``, ``cache_timeout`` and other cache-related fields could be present. + +.. _inventory_development_auto: + +The 'auto' plugin +----------------- + +From Ansible 2.5 onwards, we include the :ref:`auto inventory plugin ` and enable it by default. If the ``plugin`` field in your standard configuration file matches the name of your inventory plugin, the ``auto`` inventory plugin will load your plugin. The 'auto' plugin makes it easier to use your plugin without having to update configurations. + + +.. _inventory_scripts: +.. 
_developing_inventory_scripts: + +Inventory scripts +================= + +Even though we now have inventory plugins, we still support inventory scripts, not only for backwards compatibility but also to allow users to leverage other programming languages. + + +.. _inventory_script_conventions: + +Inventory script conventions +---------------------------- + +Inventory scripts must accept the ``--list`` and ``--host <hostname>`` arguments. Although other arguments are allowed, Ansible will not use them. +Such arguments might still be useful for executing the scripts directly. + +When the script is called with the single argument ``--list``, the script must output to stdout a JSON-encoded hash or +dictionary that contains all the groups to be managed. Each group's value should be either a hash or dictionary containing a list of each host, any child groups, and potential group variables, or simply a list of hosts:: + + + { + "group001": { + "hosts": ["host001", "host002"], + "vars": { + "var1": true + }, + "children": ["group002"] + }, + "group002": { + "hosts": ["host003","host004"], + "vars": { + "var2": 500 + }, + "children":[] + } + + } + +If any of the elements of a group are empty, they may be omitted from the output. + +When called with the argument ``--host <hostname>`` (where <hostname> is a host from above), the script must print either an empty JSON hash/dictionary, or a hash/dictionary of variables to make them available to templates and playbooks. For example:: + + + { + "VAR001": "VALUE", + "VAR002": "VALUE", + } + +Printing variables is optional. If the script does not print variables, it should print an empty hash or dictionary. + +.. _inventory_script_tuning: + +Tuning the external inventory script +------------------------------------ + +.. versionadded:: 1.3 + +The stock inventory script system mentioned above works for all versions of Ansible, but calling ``--host`` for every host can be rather inefficient, especially if it involves API calls to a remote subsystem.
+ +To avoid this inefficiency, if the inventory script returns a top-level element called "_meta", it is possible to return all the host variables in a single script execution. When this meta element contains a value for "hostvars", the inventory script will not be invoked with ``--host`` for each host. This behavior results in a significant performance increase for large numbers of hosts. + +The data to be added to the top-level JSON dictionary looks like this:: + + { + + # results of inventory script as above go here + # ... + + "_meta": { + "hostvars": { + "host001": { + "var001" : "value" + }, + "host002": { + "var002": "value" + } + } + } + } + +To satisfy the requirements of using ``_meta``, to prevent ansible from calling your inventory with ``--host`` you must at least populate ``_meta`` with an empty ``hostvars`` dictionary. +For example:: + + { + + # results of inventory script as above go here + # ... + + "_meta": { + "hostvars": {} + } + } + + +.. _replacing_inventory_ini_with_dynamic_provider: + +If you intend to replace an existing static inventory file with an inventory script, it must return a JSON object which contains an 'all' group that includes every host in the inventory as a member and every group in the inventory as a child. It should also include an 'ungrouped' group which contains all hosts which are not members of any other group. +A skeleton example of this JSON object is: + +.. code-block:: json + + { + "_meta": { + "hostvars": {} + }, + "all": { + "children": [ + "ungrouped" + ] + }, + "ungrouped": { + "children": [ + ] + } + } + +An easy way to see how this should look is using :ref:`ansible-inventory`, which also supports ``--list`` and ``--host`` parameters like an inventory script would. + +.. 
seealso:: + + :ref:`developing_api` + Python API to Playbooks and Ad Hoc Task Execution + :ref:`developing_modules_general` + Get started with developing a module + :ref:`developing_plugins` + How to develop plugins + `Ansible Tower `_ + REST API endpoint and GUI for Ansible, syncs with dynamic inventory + `Development Mailing List `_ + Mailing list for development topics + `irc.freenode.net `_ + #ansible IRC chat channel diff --git a/docs/docsite/rst/dev_guide/developing_locally.rst b/docs/docsite/rst/dev_guide/developing_locally.rst new file mode 100644 index 00000000..4c7f6b71 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_locally.rst @@ -0,0 +1,105 @@ +.. _using_local_modules_and_plugins: +.. _developing_locally: + +********************************** +Adding modules and plugins locally +********************************** + +The easiest, quickest, and the most popular way to extend Ansible is to use a local module or a plugin. You can create them or copy existing ones for local use. You can store a local module or plugin on your Ansible control node and share it with your team or organization. You can also share a local plugin or module by including it in a collection or embedding it in a role, then publishing the collection or role on Ansible Galaxy. If you are using roles on Ansible Galaxy, then you are already using local modules and plugins without realizing it. + +If you are using an existing module or plugin but Ansible can't find it, this page is all you need. However, if you want to create a plugin or a module, go to :ref:`developing_plugins` and :ref:`developing_modules_general` topics and then return to this page to know how to add it locally. + +Extending Ansible with local modules and plugins offers lots of shortcuts such as: + +* You can copy other people's modules and plugins. +* When writing a new module, you can choose any programming language you like. +* You do not have to clone any repositories. 
+* You do not have to open a pull request. +* You do not have to add tests (though we recommend that you do!). + +To save a local module or plugin such that Ansible can find and use it, add the module or plugin in the appropriate directory (the directories are specified in later parts of this topic). + +.. contents:: + :local: + +.. _modules_vs_plugins: + +Modules and plugins: what is the difference? +============================================ +If you are looking to add local functionality to Ansible, you might wonder whether you need a module or a plugin. Here is a quick overview to help you decide between the two: + +* Modules are reusable, standalone scripts that can be used by the Ansible API, the :command:`ansible` command, or the :command:`ansible-playbook` command. Modules provide a defined interface. Each module accepts arguments and returns information to Ansible by printing a JSON string to stdout before exiting. Modules execute on the target system (usually that means on a remote system) in separate processes. +* :ref:`Plugins ` augment Ansible's core functionality and execute on the control node within the ``/usr/bin/ansible`` process. Plugins offer options and extensions for the core features of Ansible - transforming data, logging output, connecting to inventory, and more. + +.. _local_modules: + +Adding a module locally +======================= +Ansible automatically loads all executable files found in certain directories as modules. + +For local modules, use the name of the file as the module name: for example, if the module file is ``~/.ansible/plugins/modules/local_users.py``, use ``local_users`` as the module name. 
+ +To load your local modules automatically and make them available to all playbooks and roles, add them in any of these locations: + +* any directory added to the ``ANSIBLE_LIBRARY`` environment variable (``$ANSIBLE_LIBRARY`` takes a colon-separated list like ``$PATH``) +* ``~/.ansible/plugins/modules/`` +* ``/usr/share/ansible/plugins/modules/`` + +After you save your module file in one of these locations, Ansible loads it and you can use it in any local task, playbook, or role. + +To confirm that ``my_custom_module`` is available: + +* type ``ansible localhost -m my_custom_module``. You should see the output for that module. + +or + +* type ``ansible-doc -t module my_custom_module``. You should see the documentation for that module. + +.. note:: + + Currently, the ``ansible-doc`` command can parse module documentation only from modules written in Python. If you have a module written in a programming language other than Python, please write the documentation in a Python file adjacent to the module file. + +You can limit the availability of your local module. If you want to use a local module only with selected playbooks or only with a single role, load it in one of the following locations: + +* In a selected playbook or playbooks: Store the module in a subdirectory called ``library`` in the directory that contains those playbooks. +* In a single role: Store the module in a subdirectory called ``library`` within that role. + +.. _distributing_plugins: +.. _local_plugins: + +Adding a plugin locally +======================= +Ansible loads plugins automatically too, and loads each type of plugin separately from a directory named for the type of plugin. Here's the full list of plugin directory names: + + * action_plugins* + * cache_plugins + * callback_plugins + * connection_plugins + * filter_plugins* + * inventory_plugins + * lookup_plugins + * shell_plugins + * strategy_plugins + * test_plugins* + * vars_plugins + +.. 
note:: + + After you add the plugins and verify that they are available for use, you can see the documentation for all the plugins except for the ones marked with an asterisk (*) above. + +To load your local plugins automatically, add them in any of these locations: + +* any directory added to the relevant ``ANSIBLE_<plugin_type>_PLUGINS`` environment variable (these variables, such as ``$ANSIBLE_INVENTORY_PLUGINS`` and ``$ANSIBLE_VARS_PLUGINS`` take colon-separated lists like ``$PATH``) +* the directory named for the correct ``plugin_type`` within ``~/.ansible/plugins/`` - for example, ``~/.ansible/plugins/callback`` +* the directory named for the correct ``plugin_type`` within ``/usr/share/ansible/plugins/`` - for example, ``/usr/share/ansible/plugins/action`` + +After your plugin file is in one of these locations, Ansible loads it and you can use it in any local module, task, playbook, or role. Alternatively, you can edit your ``ansible.cfg`` file to add directories that contain local plugins. For details about adding directories of local plugins, see :ref:`ansible_configuration_settings`. + +To confirm that ``plugins/<plugin_type>/my_custom_plugin`` is available: + +* type ``ansible-doc -t <plugin_type> my_custom_plugin``. For example, ``ansible-doc -t lookup my_custom_lookup_plugin``. You should see the documentation for that plugin. This works for all plugin types except the ones marked with ``*`` in the list above - see :ref:`ansible-doc` for more details. + +You can limit the availability of your local plugin. If you want to use a local plugin only with selected playbooks or only with a single role, load it in one of the following locations: + +* In a selected playbook or playbooks: Store the plugin in a subdirectory for the correct ``plugin_type`` (for example, ``callback_plugins`` or ``inventory_plugins``) in the directory that contains the playbooks.
+* In a single role: Store the plugin in a subdirectory for the correct ``plugin_type`` (for example, ``cache_plugins`` or ``strategy_plugins``) within that role. When shipped as part of a role, the plugin is available as soon as the role is executed. diff --git a/docs/docsite/rst/dev_guide/developing_module_utilities.rst b/docs/docsite/rst/dev_guide/developing_module_utilities.rst new file mode 100644 index 00000000..dfeaef55 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_module_utilities.rst @@ -0,0 +1,69 @@ +.. _developing_module_utilities: + +************************************* +Using and developing module utilities +************************************* + +Ansible provides a number of module utilities, or snippets of shared code, that +provide helper functions you can use when developing your own modules. The +``basic.py`` module utility provides the main entry point for accessing the +Ansible library, and all Python Ansible modules must import something from +``ansible.module_utils``. A common option is to import ``AnsibleModule``:: + + from ansible.module_utils.basic import AnsibleModule + +The ``ansible.module_utils`` namespace is not a plain Python package: it is +constructed dynamically for each task invocation, by extracting imports and +resolving those matching the namespace against a :ref:`search path ` derived from the +active configuration. + +To reduce the maintenance burden in a collection or in local modules, you can extract +duplicated code into one or more module utilities and import them into your modules. 
For example, if you have your own custom modules that import a ``my_shared_code`` library, you can place that into a ``./module_utils/my_shared_code.py`` file like this:: + + from ansible.module_utils.my_shared_code import MySharedCodeClient + +When you run ``ansible-playbook``, Ansible will merge any files in your local ``module_utils`` directories into the ``ansible.module_utils`` namespace in the order defined by the :ref:`Ansible search path `. + +Naming and finding module utilities +=================================== + +You can generally tell what a module utility does from its name and/or its location. Generic utilities (shared code used by many different kinds of modules) live in the main ansible/ansible codebase, in the ``common`` subdirectory or in the root directory of ``lib/ansible/module_utils``. Utilities used by a particular set of modules generally live in the same collection as those modules. For example: + +* ``lib/ansible/module_utils/urls.py`` contains shared code for parsing URLs +* ``openstack.cloud.plugins.module_utils.openstack.py`` contains utilities for modules that work with OpenStack instances +* ``ansible.netcommon.plugins.module_utils.network.common.config.py`` contains utility functions for use by networking modules + +Following this pattern with your own module utilities makes everything easy to find and use. + +.. _standard_mod_utils: + +Standard module utilities +========================= + +Ansible ships with an extensive library of ``module_utils`` files. You can find the module utility source code in the ``lib/ansible/module_utils`` directory under your main Ansible path. We describe the most widely used utilities below. For more details on any specific module utility, please see the `source code for module_utils `_. + +.. 
include:: shared_snippets/licensing.txt + +- ``api.py`` - Supports generic API modules +- ``basic.py`` - General definitions and helper utilities for Ansible modules +- ``common/dict_transformations.py`` - Helper functions for dictionary transformations +- ``common/file.py`` - Helper functions for working with files +- ``common/text/`` - Helper functions for converting and formatting text +- ``common/parameters.py`` - Helper functions for dealing with module parameters +- ``common/sys_info.py`` - Functions for getting distribution and platform information +- ``common/validation.py`` - Helper functions for validating module parameters against a module argument spec +- ``facts/`` - Directory of utilities for modules that return facts. See `PR 23012 `_ for more information +- ``json_utils.py`` - Utilities for filtering unrelated output around module JSON output, like leading and trailing lines +- ``powershell/`` - Directory of definitions and helper functions for Windows PowerShell modules +- ``pycompat24.py`` - Exception workaround for Python 2.4 +- ``service.py`` - Utilities to enable modules to work with Linux services (placeholder, not in use) +- ``six/__init__.py`` - Bundled copy of the `Six Python library `_ to aid in writing code compatible with both Python 2 and Python 3 +- ``splitter.py`` - String splitting and manipulation utilities for working with Jinja2 templates +- ``urls.py`` - Utilities for working with http and https requests + +Several commonly-used utilities migrated to collections in Ansible 2.10, including: + +- ``ismount.py`` migrated to ``ansible.posix.plugins.module_utils.mount.py`` - Single helper function that fixes os.path.ismount +- ``known_hosts.py`` migrated to ``community.general.plugins.module_utils.known_hosts.py`` - utilities for working with known_hosts file + +For a list of migrated content with destination collections, see https://github.com/ansible/ansible/blob/devel/lib/ansible/config/ansible_builtin_runtime.yml. 
diff --git a/docs/docsite/rst/dev_guide/developing_modules.rst b/docs/docsite/rst/dev_guide/developing_modules.rst new file mode 100644 index 00000000..5cfcf15c --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_modules.rst @@ -0,0 +1,51 @@ +.. _developing_modules: +.. _module_dev_should_you: + +**************************** +Should you develop a module? +**************************** + +Developing Ansible modules is easy, but often it is not necessary. Before you start writing a new module, ask: + +1. Does a similar module already exist? + +An existing module may cover the functionality you want. Ansible collections include thousands of modules. Search our :ref:`list of included collections ` or `Ansible Galaxy `_ to see if an existing module does what you need. + +2. Should you use or develop an action plugin instead of a module? + +An action plugin may be the best way to get the functionality you want. Action plugins run on the control node instead of on the managed node, and their functionality is available to all modules. For more information about developing plugins, read the :ref:`developing plugins page `. + +3. Should you use a role instead of a module? + +A combination of existing modules may cover the functionality you want. You can write a role for this type of use case. Check out the :ref:`roles documentation`. + +4. Should you create a collection instead of a single module? + +The functionality you want may be too large for a single module. If you want to connect Ansible to a new cloud provider, database, or network platform, you may need to :ref:`develop a new collection`. + +* Each module should have a concise and well defined functionality. Basically, follow the UNIX philosophy of doing one thing well. + +* A module should not require that a user know all the underlying options of an API/tool to be used. For instance, if the legal values for a required module parameter cannot be documented, that's a sign that the module would be rejected. 
+ +* Modules should typically encompass much of the logic for interacting with a resource. A lightweight wrapper around an API that does not contain much logic would likely cause users to offload too much logic into a playbook, and for this reason the module would be rejected. Instead try creating multiple modules for interacting with smaller individual pieces of the API. + +If your use case isn't covered by an existing module, an action plugin, or a role, and you don't need to create multiple modules, then you're ready to start developing a new module. Choose from the topics below for next steps: + +* I want to :ref:`get started on a new module `. +* I want to review :ref:`tips and conventions for developing good modules `. +* I want to :ref:`write a Windows module `. +* I want :ref:`an overview of Ansible's architecture `. +* I want to :ref:`document my module `. +* I want to :ref:`contribute my module back to Ansible Core `. +* I want to :ref:`add unit and integration tests to my module `. +* I want to :ref:`add Python 3 support to my module `. +* I want to :ref:`write multiple modules `. + +.. seealso:: + + :ref:`list_of_collections` + Browse existing collections, modules, and plugins + `Mailing List `_ + Development mailing list + `irc.freenode.net `_ + #ansible IRC chat channel diff --git a/docs/docsite/rst/dev_guide/developing_modules_best_practices.rst b/docs/docsite/rst/dev_guide/developing_modules_best_practices.rst new file mode 100644 index 00000000..19787f69 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_modules_best_practices.rst @@ -0,0 +1,177 @@ +.. _developing_modules_best_practices: +.. _module_dev_conventions: + +******************************* +Conventions, tips, and pitfalls +******************************* + +.. 
contents:: Topics + :local: + +As you design and develop modules, follow these basic conventions and tips for clean, usable code: + +Scoping your module(s) +====================== + +Especially if you want to contribute your module(s) to an existing Ansible Collection, make sure each module includes enough logic and functionality, but not too much. If these guidelines seem confusing, consider :ref:`whether you really need to write a module ` at all. + +* Each module should have a concise and well-defined functionality. Basically, follow the UNIX philosophy of doing one thing well. +* Do not add ``get``, ``list`` or ``info`` state options to an existing module - create a new ``_info`` or ``_facts`` module. +* Modules should not require that a user know all the underlying options of an API/tool to be used. For instance, if the legal values for a required module option cannot be documented, the module does not belong in Ansible Core. +* Modules should encompass much of the logic for interacting with a resource. A lightweight wrapper around a complex API forces users to offload too much logic into their playbooks. If you want to connect Ansible to a complex API, :ref:`create multiple modules ` that interact with smaller individual pieces of the API. +* Avoid creating a module that does the work of other modules; this leads to code duplication and divergence, and makes things less uniform, unpredictable and harder to maintain. Modules should be the building blocks. If you are asking 'how can I have a module execute other modules' ... you want to write a role. + +Designing module interfaces +=========================== + +* If your module is addressing an object, the option for that object should be called ``name`` whenever possible, or accept ``name`` as an alias. +* Modules accepting boolean status should accept ``yes``, ``no``, ``true``, ``false``, or anything else a user may likely throw at them. The AnsibleModule common code supports this with ``type='bool'``. 
+* Avoid ``action``/``command``, they are imperative and not declarative, there are other ways to express the same thing. + +General guidelines & tips +========================= + +* Each module should be self-contained in one file, so it can be auto-transferred by ``ansible-base``. +* Module name MUST use underscores instead of hyphens or spaces as a word separator. Using hyphens and spaces will prevent ``ansible-base`` from importing your module. +* Always use the ``hacking/test-module.py`` script when developing modules - it will warn you about common pitfalls. +* If you have a local module that returns information specific to your installations, a good name for this module is ``site_info``. +* Eliminate or minimize dependencies. If your module has dependencies, document them at the top of the module file and raise JSON error messages when dependency import fails. +* Don't write to files directly; use a temporary file and then use the ``atomic_move`` function from ``ansible.module_utils.basic`` to move the updated temporary file into place. This prevents data corruption and ensures that the correct context for the file is kept. +* Avoid creating caches. Ansible is designed without a central server or authority, so you cannot guarantee it will not run with different permissions, options or locations. If you need a central authority, have it on top of Ansible (for example, using bastion/cm/ci server or tower); do not try to build it into modules. +* If you package your module(s) in an RPM, install the modules on the control machine in ``/usr/share/ansible``. Packaging modules in RPMs is optional. + +Functions and Methods +===================== + +* Each function should be concise and should describe a meaningful amount of work. +* "Don't repeat yourself" is generally a good philosophy. +* Function names should use underscores: ``my_function_name``. +* The name of each function should describe what the function does. +* Each function should have a docstring. 
+* If your code is too nested, that's usually a sign the loop body could benefit from being a function. Parts of our existing code are not the best examples of this at times. + +Python tips +=========== + +* When fetching URLs, use ``fetch_url`` or ``open_url`` from ``ansible.module_utils.urls``. Do not use ``urllib2``, which does not natively verify TLS certificates and so is insecure for https. +* Include a ``main`` function that wraps the normal execution. +* Call your ``main`` function from a conditional so you can import it into unit tests - for example: + +.. code-block:: python + + if __name__ == '__main__': + main() + +.. _shared_code: + +Importing and using shared code +=============================== + +* Use shared code whenever possible - don't reinvent the wheel. Ansible offers the ``AnsibleModule`` common Python code, plus :ref:`utilities ` for many common use cases and patterns. You can also create documentation fragments for docs that apply to multiple modules. +* Import ``ansible.module_utils`` code in the same place as you import other libraries. +* Do NOT use wildcards (*) for importing other python modules; instead, list the function(s) you are importing (for example, ``from some.other_python_module.basic import otherFunction``). +* Import custom packages in ``try``/``except``, capture any import errors, and handle them with ``fail_json()`` in ``main()``. For example: + +.. code-block:: python + + import traceback + + from ansible.module_utils.basic import missing_required_lib + + LIB_IMP_ERR = None + try: + import foo + HAS_LIB = True + except: + HAS_LIB = False + LIB_IMP_ERR = traceback.format_exc() + + +Then in ``main()``, just after the argspec, do + +.. code-block:: python + + if not HAS_LIB: + module.fail_json(msg=missing_required_lib("foo"), + exception=LIB_IMP_ERR) + + +And document the dependency in the ``requirements`` section of your module's :ref:`documentation_block`. + +.. 
_module_failures: + +Handling module failures +======================== + +When your module fails, help users understand what went wrong. If you are using the ``AnsibleModule`` common Python code, the ``failed`` element will be included for you automatically when you call ``fail_json``. For polite module failure behavior: + +* Include a key of ``failed`` along with a string explanation in ``msg``. If you don't do this, Ansible will use standard return codes: 0=success and non-zero=failure. +* Don't raise a traceback (stacktrace). Ansible can deal with stacktraces and automatically converts anything unparseable into a failed result, but raising a stacktrace on module failure is not user-friendly. +* Do not use ``sys.exit()``. Use ``fail_json()`` from the module object. + +Handling exceptions (bugs) gracefully +===================================== + +* Validate upfront--fail fast and return useful and clear error messages. +* Use defensive programming--use a simple design for your module, handle errors gracefully, and avoid direct stacktraces. +* Fail predictably--if we must fail, do it in a way that is the most expected. Either mimic the underlying tool or the general way the system works. +* Give out a useful message on what you were doing and add exception messages to that. +* Avoid catchall exceptions; they are not very useful unless the underlying API gives very good error messages pertaining to the attempted action. + +.. _module_output: + +Creating correct and informative module output +============================================== + +Modules must output valid JSON only. Follow these guidelines for creating correct, useful module output: + +* Make your top-level return type a hash (dictionary). +* Nest complex return values within the top-level hash. +* Incorporate any lists or simple scalar values within the top-level return hash. 
+* Do not send module output to standard error, because the system will merge standard out with standard error and prevent the JSON from parsing. +* Capture standard error and return it as a variable in the JSON on standard out. This is how the command module is implemented. +* Never do ``print("some status message")`` in a module, because it will not produce valid JSON output. +* Always return useful data, even when there is no change. +* Be consistent about returns (some modules are too random), unless it is detrimental to the state/action. +* Make returns reusable--most of the time you don't want to read it, but you do want to process it and re-purpose it. +* Return diff if in diff mode. This is not required for all modules, as it won't make sense for certain ones, but please include it when applicable. +* Enable your return values to be serialized as JSON with Python's standard `JSON encoder and decoder `_ library. Basic python types (strings, int, dicts, lists, and so on) are serializable. +* Do not return an object using exit_json(). Instead, convert the fields you need from the object into the fields of a dictionary and return the dictionary. +* Results from many hosts will be aggregated at once, so your module should return only relevant output. Returning the entire contents of a log file is generally bad form. + +If a module returns stderr or otherwise fails to produce valid JSON, the actual output will still be shown in Ansible, but the command will not succeed. + +.. _module_conventions: + +Following Ansible conventions +============================= + +Ansible conventions offer a predictable user interface across all modules, playbooks, and roles. To follow Ansible conventions in your module development: + +* Use consistent names across modules (yes, we have many legacy deviations - don't make the problem worse!). +* Use consistent options (arguments) within your module(s). 
+* Do not use 'message' or 'syslog_facility' as an option name, because this is used internally by Ansible. +* Normalize options with other modules - if Ansible and the API your module connects to use different names for the same option, add aliases to your options so the user can choose which names to use in tasks and playbooks. +* Return facts from ``*_facts`` modules in the ``ansible_facts`` field of the :ref:`result dictionary <common_return_values>` so other modules can access them. +* Implement ``check_mode`` in all ``*_info`` and ``*_facts`` modules. Playbooks which conditionalize based on fact information will only conditionalize correctly in ``check_mode`` if the facts are returned in ``check_mode``. Usually you can add ``supports_check_mode=True`` when instantiating ``AnsibleModule``. +* Use module-specific environment variables. For example, if you use the helpers in ``module_utils.api`` for basic authentication with ``module_utils.urls.fetch_url()`` and you fall back on environment variables for default values, use a module-specific environment variable like :code:`API_<MODULENAME>_USERNAME` to avoid conflicts between modules. +* Keep module options simple and focused - if you're loading a lot of choices/states on an existing option, consider adding a new, simple option instead. +* Keep options small when possible. Passing a large data structure to an option might save us a few tasks, but it adds a complex requirement that we cannot easily validate before passing on to the module. +* If you want to pass complex data to an option, write an expert module that allows this, along with several smaller modules that provide a more 'atomic' operation against the underlying APIs and services. Complex operations require complex data. Let the user choose whether to reflect that complexity in tasks and plays or in vars files. +* Implement declarative operations (not CRUD) so the user can ignore existing state and focus on final state. For example, use ``started/stopped``, ``present/absent``. 
+* Strive for a consistent final state (aka idempotency). If running your module twice in a row against the same system would result in two different states, see if you can redesign or rewrite to achieve consistent final state. If you can't, document the behavior and the reasons for it. +* Provide consistent return values within the standard Ansible return structure, even if NA/None are used for keys normally returned under other options. +* Follow additional guidelines that apply to families of modules if applicable. For example, AWS modules should follow the :ref:`Amazon development checklist `. + + +Module Security +=============== + +* Avoid passing user input from the shell. +* Always check return codes. +* You must always use ``module.run_command``, not ``subprocess`` or ``Popen`` or ``os.system``. +* Avoid using the shell unless absolutely necessary. +* If you must use the shell, you must pass ``use_unsafe_shell=True`` to ``module.run_command``. +* If any variables in your module can come from user input with ``use_unsafe_shell=True``, you must wrap them with ``pipes.quote(x)``. +* When fetching URLs, use ``fetch_url`` or ``open_url`` from ``ansible.module_utils.urls``. Do not use ``urllib2``, which does not natively verify TLS certificates and so is insecure for https. +* Sensitive values marked with ``no_log=True`` will automatically have that value stripped from module return values. If your module could return these sensitive values as part of a dictionary key name, you should call the ``ansible.module_utils.basic.sanitize_keys()`` function to strip the values from the keys. See the ``uri`` module for an example. diff --git a/docs/docsite/rst/dev_guide/developing_modules_checklist.rst b/docs/docsite/rst/dev_guide/developing_modules_checklist.rst new file mode 100644 index 00000000..492b6015 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_modules_checklist.rst @@ -0,0 +1,46 @@ +.. _developing_modules_checklist: +.. 
_module_contribution: + +********************************************************** +Contributing your module to an existing Ansible collection +********************************************************** + +If you want to contribute a module to an existing collection, you must meet the community's objective and subjective requirements. Please read the details below, and also review our :ref:`tips for module development `. + +Modules accepted into certain collections are included in every Ansible release on PyPI. However, contributing to one of these collections is not the only way to distribute a module - you can :ref:`create your own collection `, embed modules in roles on Galaxy or simply share copies of your module code for :ref:`local use `. + +Contributing modules: objective requirements +=============================================== + +To contribute a module to most Ansible collections, you must: + +* write your module in either Python or Powershell for Windows +* use the ``AnsibleModule`` common code +* support Python 2.6 and Python 3.5 - if your module cannot support Python 2.6, explain the required minimum Python version and rationale in the requirements section in ``DOCUMENTATION`` +* use proper :ref:`Python 3 syntax ` +* follow `PEP 8 `_ Python style conventions - see :ref:`testing_pep8` for more information +* license your module under the GPL license (GPLv3 or later) +* understand the :ref:`license agreement `, which applies to all contributions +* conform to Ansible's :ref:`formatting and documentation ` standards +* include comprehensive :ref:`tests ` for your module +* minimize module dependencies +* support :ref:`check_mode ` if possible +* ensure your code is readable +* if a module is named ``_facts``, it should be because its main purpose is returning ``ansible_facts``. Do not name modules that do not do this with ``_facts``. 
Only use ``ansible_facts`` for information that is specific to the host machine, for example network interfaces and their configuration, which operating system and which programs are installed. +* Modules that query/return general information (and not ``ansible_facts``) should be named ``_info``. General information is non-host specific information, for example information on online/cloud services (you can access different accounts for the same online service from the same host), or information on VMs and containers accessible from the machine. + +Additional requirements may apply for certain collections. Review the individual collection repositories for more information. + +Please make sure your module meets these requirements before you submit your PR/proposal. If you have questions, reach out via `Ansible's IRC chat channel `_ or the `Ansible development mailing list `_. + +Contributing to Ansible: subjective requirements +================================================ + +If your module meets these objective requirements, collection maintainers will review your code to see if they think it's clear, concise, secure, and maintainable. They will consider whether your module provides a good user experience, helpful error messages, reasonable defaults, and more. This process is subjective, with no exact standards for acceptance. For the best chance of getting your module accepted, follow our :ref:`tips for module development `. + +Other checklists +================ + +* :ref:`Tips for module development `. +* :ref:`Amazon development checklist `. +* :ref:`Windows development checklist `. diff --git a/docs/docsite/rst/dev_guide/developing_modules_documenting.rst b/docs/docsite/rst/dev_guide/developing_modules_documenting.rst new file mode 100644 index 00000000..096e9f17 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_modules_documenting.rst @@ -0,0 +1,442 @@ +.. _developing_modules_documenting: +.. 
_module_documenting: + +******************************* +Module format and documentation +******************************* + +If you want to contribute your module to most Ansible collections, you must write your module in Python and follow the standard format described below. (Unless you're writing a Windows module, in which case the :ref:`Windows guidelines ` apply.) In addition to following this format, you should review our :ref:`submission checklist `, :ref:`programming tips `, and :ref:`strategy for maintaining Python 2 and Python 3 compatibility `, as well as information about :ref:`testing ` before you open a pull request. + +Every Ansible module written in Python must begin with seven standard sections in a particular order, followed by the code. The sections in order are: + +.. contents:: + :depth: 1 + :local: + +.. note:: Why don't the imports go first? + + Keen Python programmers may notice that contrary to PEP 8's advice we don't put ``imports`` at the top of the file. This is because the ``DOCUMENTATION`` through ``RETURN`` sections are not used by the module code itself; they are essentially extra docstrings for the file. The imports are placed after these special variables for the same reason as PEP 8 puts the imports after the introductory comments and docstrings. This keeps the active parts of the code together and the pieces which are purely informational apart. The decision to exclude E402 is based on readability (which is what PEP 8 is about). Documentation strings in a module are much more similar to module level docstrings, than code, and are never utilized by the module itself. Placing the imports below this documentation and closer to the code, consolidates and groups all related code in a congruent manner to improve readability, debugging and understanding. + +.. 
warning:: **Copy old modules with care!** + + Some older Ansible modules have ``imports`` at the bottom of the file, ``Copyright`` notices with the full GPL prefix, and/or ``DOCUMENTATION`` fields in the wrong order. These are legacy files that need updating - do not copy them into new modules. Over time we are updating and correcting older modules. Please follow the guidelines on this page! + +.. _shebang: + +Python shebang & UTF-8 coding +=============================== + +Begin your Ansible module with ``#!/usr/bin/python`` - this "shebang" allows ``ansible_python_interpreter`` to work. Follow the shebang immediately with ``# -*- coding: utf-8 -*-`` to clarify that the file is UTF-8 encoded. + +.. _copyright: + +Copyright and license +===================== + +After the shebang and UTF-8 coding, add a `copyright line `_ with the original copyright holder and a license declaration. The license declaration should be ONLY one line, not the full GPL prefix.: + +.. code-block:: python + + #!/usr/bin/python + # -*- coding: utf-8 -*- + + # Copyright: (c) 2018, Terry Jones + # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +Major additions to the module (for instance, rewrites) may add additional copyright lines. Any legal review will include the source control history, so an exhaustive copyright header is not necessary. +Please do not edit the existing copyright year. This simplifies project administration and is unlikely to cause any interesting legal issues. +When adding a second copyright line for a significant feature or rewrite, add the newer line above the older one: + +.. code-block:: python + + #!/usr/bin/python + # -*- coding: utf-8 -*- + + # Copyright: (c) 2017, [New Contributor(s)] + # Copyright: (c) 2015, [Original Contributor(s)] + # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +.. 
_ansible_metadata_block: + +ANSIBLE_METADATA block +====================== + +Since we moved to collections we have deprecated the METADATA functionality; it is no longer required for modules, but it will not break anything if present. + + +.. _documentation_block: + +DOCUMENTATION block +=================== + +After the shebang, the UTF-8 coding, the copyright line, and the license section comes the ``DOCUMENTATION`` block. Ansible's online module documentation is generated from the ``DOCUMENTATION`` blocks in each module's source code. The ``DOCUMENTATION`` block must be valid YAML. You may find it easier to start writing your ``DOCUMENTATION`` string in an :ref:`editor with YAML syntax highlighting ` before you include it in your Python file. You can start by copying our `example documentation string `_ into your module file and modifying it. If you run into syntax issues in your YAML, you can validate it on the `YAML Lint `_ website. + +Module documentation should briefly and accurately define what each module and option does, and how it works with others in the underlying system. Documentation should be written for a broad audience--readable both by experts and non-experts. + * Descriptions should always start with a capital letter and end with a full stop. Consistency always helps. + * Verify that arguments in doc and module spec dict are identical. + * For password / secret arguments ``no_log=True`` should be set. + * For arguments that seem to contain sensitive information but **do not** contain secrets, such as "password_length", set ``no_log=False`` to disable the warning message. + * If an option is only sometimes required, describe the conditions. For example, "Required when I(state=present)." + * If your module allows ``check_mode``, reflect this fact in the documentation. + +To create clear, concise, consistent, and useful documentation, follow the :ref:`style guide `. + +Each documentation field is described below. 
Before committing your module documentation, please test it at the command line and as HTML: + +* As long as your module file is :ref:`available locally `, you can use ``ansible-doc -t module my_module_name`` to view your module documentation at the command line. Any parsing errors will be obvious - you can view details by adding ``-vvv`` to the command. +* You should also :ref:`test the HTML output ` of your module documentation. + +Documentation fields +-------------------- + +All fields in the ``DOCUMENTATION`` block are lower-case. All fields are required unless specified otherwise: + +:module: + + * The name of the module. + * Must be the same as the filename, without the ``.py`` extension. + +:short_description: + + * A short description which is displayed on the :ref:`list_of_collections` page and ``ansible-doc -l``. + * The ``short_description`` is displayed by ``ansible-doc -l`` without any category grouping, + so it needs enough detail to explain the module's purpose without the context of the directory structure in which it lives. + * Unlike ``description:``, ``short_description`` should not have a trailing period/full stop. + +:description: + + * A detailed description (generally two or more sentences). + * Must be written in full sentences, in other words, with capital letters and periods/full stops. + * Shouldn't mention the module name. + * Make use of multiple entries rather than using one long paragraph. + * Don't quote complete values unless it is required by YAML. + +:version_added: + + * The version of Ansible when the module was added. + * This is a string, and not a float, for example, ``version_added: '2.1'``. + * In collections, this must be the collection version the module was added to, not the Ansible version. For example, ``version_added: 1.0.0``. + +:author: + + * Name of the module author in the form ``First Last (@GitHubID)``. + * Use a multi-line list if there is more than one author. 
+ * Don't use quotes as it should not be required by YAML. + +:deprecated: + + * Marks modules that will be removed in future releases. See also :ref:`module_lifecycle`. + +:options: + + * Options are often called `parameters` or `arguments`. Because the documentation field is called `options`, we will use that term. + * If the module has no options (for example, it's a ``_facts`` module), all you need is one line: ``options: {}``. + * If your module has options (in other words, accepts arguments), each option should be documented thoroughly. For each module option, include: + + :option-name: + + * Declarative operation (not CRUD), to focus on the final state, for example `online:`, rather than `is_online:`. + * The name of the option should be consistent with the rest of the module, as well as other modules in the same category. + * When in doubt, look for other modules to find option names that are used for the same purpose, we like to offer consistency to our users. + + :description: + + * Detailed explanation of what this option does. It should be written in full sentences. + * The first entry is a description of the option itself; subsequent entries detail its use, dependencies, or format of possible values. + * Should not list the possible values (that's what ``choices:`` is for, though it should explain what the values do if they aren't obvious). + * If an option is only sometimes required, describe the conditions. For example, "Required when I(state=present)." + * Mutually exclusive options must be documented as the final sentence on each of the options. + + :required: + + * Only needed if ``true``. + * If missing, we assume the option is not required. + + :default: + + * If ``required`` is false/missing, ``default`` may be specified (assumed 'null' if missing). + * Ensure that the default value in the docs matches the default value in the code. 
+ * The default field must not be listed as part of the description, unless it requires additional information or conditions. + * If the option is a boolean value, you can use any of the boolean values recognized by Ansible: + (such as true/false or yes/no). Choose the one that reads better in the context of the option. + + :choices: + + * List of option values. + * Should be absent if empty. + + :type: + + * Specifies the data type that option accepts, must match the ``argspec``. + * If an argument is ``type='bool'``, this field should be set to ``type: bool`` and no ``choices`` should be specified. + * If an argument is ``type='list'``, ``elements`` should be specified. + + :elements: + + * Specifies the data type for list elements in case ``type='list'``. + + :aliases: + * List of optional name aliases. + * Generally not needed. + + :version_added: + + * Only needed if this option was extended after initial Ansible release, in other words, this is greater than the top level `version_added` field. + * This is a string, and not a float, for example, ``version_added: '2.3'``. + * In collections, this must be the collection version the option was added to, not the Ansible version. For example, ``version_added: 1.0.0``. + + :suboptions: + + * If this option takes a dict or list of dicts, you can define the structure here. + * See :ref:`ansible_collections.azure.azcollection.azure_rm_securitygroup_module`, :ref:`ansible_collections.azure.azcollection.azure_rm_azurefirewall_module`, and :ref:`ansible_collections.openstack.cloud.baremetal_node_action_module` for examples. + +:requirements: + + * List of requirements (if applicable). + * Include minimum versions. + +:seealso: + + * A list of references to other modules, documentation or Internet resources + * In Ansible 2.10 and later, references to modules must use the FQCN or ``ansible.builtin`` for modules in ``ansible-base``. + * A reference can be one of the following formats: + + + .. 
code-block:: yaml+jinja + + seealso: + + # Reference by module name + - module: cisco.aci.aci_tenant + + # Reference by module name, including description + - module: cisco.aci.aci_tenant + description: ACI module to create tenants on a Cisco ACI fabric. + + # Reference by rST documentation anchor + - ref: aci_guide + description: Detailed information on how to manage your ACI infrastructure using Ansible. + + # Reference by Internet resource + - name: APIC Management Information Model reference + description: Complete reference of the APIC object model. + link: https://developer.cisco.com/docs/apic-mim-ref/ + +:notes: + + * Details of any important information that doesn't fit in one of the above sections. + * For example, whether ``check_mode`` is or is not supported. + + +Linking and other format macros within module documentation +----------------------------------------------------------- + +You can link from your module documentation to other module docs, other resources on docs.ansible.com, and resources elsewhere on the internet with the help of some pre-defined macros. The correct formats for these macros are: + +* ``L()`` for links with a heading. For example: ``See L(Ansible Tower,https://www.ansible.com/products/tower).`` As of Ansible 2.10, do not use ``L()`` for relative links between Ansible documentation and collection documentation. +* ``U()`` for URLs. For example: ``See U(https://www.ansible.com/products/tower) for an overview.`` +* ``R()`` for cross-references with a heading (added in Ansible 2.10). For example: ``See R(Cisco IOS Platform Guide,ios_platform_options)``. Use the RST anchor for the cross-reference. See :ref:`adding_anchors_rst` for details. +* ``M()`` for module names. For example: ``See also M(ansible.builtin.yum) or M(community.general.apt_rpm)``. + +There are also some macros which do not create links but we use them to display certain types of +content in a uniform way: + +* ``I()`` for option names. 
For example: ``Required if I(state=present).`` This is italicized in + the documentation. +* ``C()`` for files, option values, and inline code. For example: ``If not set the environment variable C(ACME_PASSWORD) will be used.`` or ``Use C(var | foo.bar.my_filter) to transform C(var) into the required format.`` This displays with a mono-space font in the documentation. +* ``B()`` currently has no standardized usage. It is displayed in boldface in the documentation. +* ``HORIZONTALLINE`` is used sparingly as a separator in long descriptions. It becomes a horizontal rule (the ``<hr>
`` html tag) in the documentation. + +.. note:: + + For links between modules and documentation within a collection, you can use any of the options above. For links outside of your collection, use ``R()`` if available. Otherwise, use ``U()`` or ``L()`` with full URLs (not relative links). For modules, use ``M()`` with the FQCN or ``ansible.builtin`` as shown in the example. If you are creating your own documentation site, you will need to use the `intersphinx extension `_ to convert ``R()`` and ``M()`` to the correct links. + + +.. note:: + - To refer to a group of modules in a collection, use ``R()``. When a collection is not the right granularity, use ``C(..)``: + + -``Refer to the R(community.kubernetes collection, plugins_in_community.kubernetes) for information on managing kubernetes clusters.`` + -``The C(win_*) modules (spread across several collections) allow you to manage various aspects of windows hosts.`` + + +.. note:: + + Because it stands out better, use ``seealso`` for general references over the use of notes or adding links to the description. + +.. _module_docs_fragments: + +Documentation fragments +----------------------- + +If you are writing multiple related modules, they may share common documentation, such as authentication details, file mode settings, ``notes:`` or ``seealso:`` entries. Rather than duplicate that information in each module's ``DOCUMENTATION`` block, you can save it once as a doc_fragment plugin and use it in each module's documentation. In Ansible, shared documentation fragments are contained in a ``ModuleDocFragment`` class in `lib/ansible/plugins/doc_fragments/ `_ or the equivalent directory in a collection. To include a documentation fragment, add ``extends_documentation_fragment: FRAGMENT_NAME`` in your module documentation. Use the fully qualified collection name for the FRAGMENT_NAME (for example, ``community.kubernetes.k8s_auth_options``). 
+ +Modules should only use items from a doc fragment if the module will implement all of the interface documented there in a manner that behaves the same as the existing modules which import that fragment. The goal is that items imported from the doc fragment will behave identically when used in another module that imports the doc fragment. + +By default, only the ``DOCUMENTATION`` property from a doc fragment is inserted into the module documentation. It is possible to define additional properties in the doc fragment in order to import only certain parts of a doc fragment or mix and match as appropriate. If a property is defined in both the doc fragment and the module, the module value overrides the doc fragment. + +Here is an example doc fragment named ``example_fragment.py``: + +.. code-block:: python + + class ModuleDocFragment(object): + # Standard documentation + DOCUMENTATION = r''' + options: + # options here + ''' + + # Additional section + OTHER = r''' + options: + # other options here + ''' + + +To insert the contents of ``OTHER`` in a module: + +.. code-block:: yaml+jinja + + extends_documentation_fragment: example_fragment.other + +Or use both : + +.. code-block:: yaml+jinja + + extends_documentation_fragment: + - example_fragment + - example_fragment.other + +.. _note: + * Prior to Ansible 2.8, documentation fragments were kept in ``lib/ansible/utils/module_docs_fragments``. + +.. versionadded:: 2.8 + +Since Ansible 2.8, you can have user-supplied doc_fragments by using a ``doc_fragments`` directory adjacent to play or role, just like any other plugin. + +For example, all AWS modules should include: + +.. code-block:: yaml+jinja + + extends_documentation_fragment: + - aws + - ec2 + +:ref:`docfragments_collections` describes how to incorporate documentation fragments in a collection. + +.. 
_examples_block: + +EXAMPLES block +============== + +After the shebang, the UTF-8 coding, the copyright line, the license section, and the ``DOCUMENTATION`` block comes the ``EXAMPLES`` block. Here you show users how your module works with real-world examples in multi-line plain-text YAML format. The best examples are ready for the user to copy and paste into a playbook. Review and update your examples with every change to your module. + +Per playbook best practices, each example should include a ``name:`` line:: + + EXAMPLES = r''' + - name: Ensure foo is installed + namespace.collection.modulename: + name: foo + state: present + ''' + +The ``name:`` line should be capitalized and not include a trailing dot. + +Use a fully qualified collection name (FQCN) as a part of the module's name like in the example above. For modules in ``ansible-base``, use the ``ansible.builtin.`` identifier, for example ``ansible.builtin.debug``. + +If your examples use boolean options, use yes/no values. Since the documentation generates boolean values as yes/no, having the examples use these values as well makes the module documentation more consistent. + +If your module returns facts that are often needed, an example of how to use them can be helpful. + +.. _return_block: + +RETURN block +============ + +After the shebang, the UTF-8 coding, the copyright line, the license section, ``DOCUMENTATION`` and ``EXAMPLES`` blocks comes the ``RETURN`` block. This section documents the information the module returns for use by other modules. + +If your module doesn't return anything (apart from the standard returns), this section of your module should read: ``RETURN = r''' # '''`` +Otherwise, for each value returned, provide the following fields. All fields are required unless specified otherwise. + +:return name: + Name of the returned field. + + :description: + Detailed description of what this value represents. Capitalized and with trailing dot. 
+ :returned: + When this value is returned, such as ``always``, ``changed`` or ``success``. This is a string and can contain any human-readable content. + :type: + Data type. + :elements: + If ``type='list'``, specifies the data type of the list's elements. + :sample: + One or more examples. + :version_added: + Only needed if this return was extended after initial Ansible release, in other words, this is greater than the top level `version_added` field. + This is a string, and not a float, for example, ``version_added: '2.3'``. + :contains: + Optional. To describe nested return values, set ``type: dict``, or ``type: list``/``elements: dict``, or if you really have to, ``type: complex``, and repeat the elements above for each sub-field. + +Here are two example ``RETURN`` sections, one with three simple fields and one with a complex nested field:: + + RETURN = r''' + dest: + description: Destination file/path. + returned: success + type: str + sample: /path/to/file.txt + src: + description: Source file used for the copy on the target machine. + returned: changed + type: str + sample: /home/httpd/.ansible/tmp/ansible-tmp-1423796390.97-147729857856000/source + md5sum: + description: MD5 checksum of the file after running copy. + returned: when supported + type: str + sample: 2a5aeecc61dc98c4d780b14b330e3282 + ''' + + RETURN = r''' + packages: + description: Information about package requirements. + returned: success + type: dict + contains: + missing: + description: Packages that are missing from the system. + returned: success + type: list + elements: str + sample: + - libmysqlclient-dev + - libxml2-dev + badversion: + description: Packages that are installed but at bad versions. + returned: success + type: list + elements: dict + sample: + - package: libxml2-dev + version: 2.9.4+dfsg1-2 + constraint: ">= 3.0" + ''' + +.. 
_python_imports: + +Python imports +============== + +After the shebang, the UTF-8 coding, the copyright line, the license, and the sections for ``DOCUMENTATION``, ``EXAMPLES``, and ``RETURN``, you can finally add the python imports. All modules must use Python imports in the form: + +.. code-block:: python + + from module_utils.basic import AnsibleModule + +The use of "wildcard" imports such as ``from module_utils.basic import *`` is no longer allowed. + +.. _dev_testing_module_documentation: + +Testing module documentation +============================ + +To test Ansible documentation locally please :ref:`follow instruction`. diff --git a/docs/docsite/rst/dev_guide/developing_modules_general.rst b/docs/docsite/rst/dev_guide/developing_modules_general.rst new file mode 100644 index 00000000..cb183b70 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_modules_general.rst @@ -0,0 +1,221 @@ +.. _developing_modules_general: +.. _module_dev_tutorial_sample: + +******************************************* +Ansible module development: getting started +******************************************* + +A module is a reusable, standalone script that Ansible runs on your behalf, either locally or remotely. Modules interact with your local machine, an API, or a remote system to perform specific tasks like changing a database password or spinning up a cloud instance. Each module can be used by the Ansible API, or by the :command:`ansible` or :command:`ansible-playbook` programs. A module provides a defined interface, accepts arguments, and returns information to Ansible by printing a JSON string to stdout before exiting. + +If you need functionality that is not available in any of the thousands of Ansible modules found in collections, you can easily write your own custom module. When you write a module for local use, you can choose any programming language and follow your own rules. Use this topic to learn how to create an Ansible module in Python. 
After you create a module, you must add it locally to the appropriate directory so that Ansible can find and execute it. For details about adding a module locally, see :ref:`developing_locally`. + +.. contents:: + :local: + +.. _environment_setup: + +Environment setup +================= + +Prerequisites via apt (Ubuntu) +------------------------------ + +Due to dependencies (for example ansible -> paramiko -> pynacl -> libffi): + +.. code:: bash + + sudo apt update + sudo apt install build-essential libssl-dev libffi-dev python-dev + +Common environment setup +------------------------------ + +1. Clone the Ansible repository: + ``$ git clone https://github.com/ansible/ansible.git`` +2. Change directory into the repository root dir: ``$ cd ansible`` +3. Create a virtual environment: ``$ python3 -m venv venv`` (or for + Python 2 ``$ virtualenv venv``. Note, this requires you to install + the virtualenv package: ``$ pip install virtualenv``) +4. Activate the virtual environment: ``$ . venv/bin/activate`` +5. Install development requirements: + ``$ pip install -r requirements.txt`` +6. Run the environment setup script for each new dev shell process: + ``$ . hacking/env-setup`` + +.. note:: After the initial setup above, every time you are ready to start + developing Ansible you should be able to just run the following from the + root of the Ansible repo: + ``$ . venv/bin/activate && . hacking/env-setup`` + + +Creating an info or a facts module +================================== + +Ansible gathers information about the target machines using facts modules, and gathers information on other objects or files using info modules. +If you find yourself trying to add ``state: info`` or ``state: list`` to an existing module, that is often a sign that a new dedicated ``_facts`` or ``_info`` module is needed. + +In Ansible 2.8 and onwards, we have two types of information modules: ``*_info`` and ``*_facts``. 
+ +If a module is named ``<something>_facts``, it should be because its main purpose is returning ``ansible_facts``. Do not name modules that do not do this with ``_facts``. +Only use ``ansible_facts`` for information that is specific to the host machine, for example network interfaces and their configuration, which operating system and which programs are installed. + +Modules that query/return general information (and not ``ansible_facts``) should be named ``<something>_info``. +General information is non-host specific information, for example information on online/cloud services (you can access different accounts for the same online service from the same host), or information on VMs and containers accessible from the machine, or information on individual files or programs. + +Info and facts modules are just like any other Ansible module, with a few minor requirements: + +1. They MUST be named ``<something>_info`` or ``<something>_facts``, where ``<something>`` is singular. +2. Info ``*_info`` modules MUST return in the form of the :ref:`result dictionary <common_return_values>` so other modules can access them. +3. Fact ``*_facts`` modules MUST return in the ``ansible_facts`` field of the :ref:`result dictionary <common_return_values>` so other modules can access them. +4. They MUST support :ref:`check_mode <check_mode_dry>`. +5. They MUST NOT make any changes to the system. +6. They MUST document the :ref:`return fields <return_block>` and :ref:`examples <examples_block>`. + +To create an info module: + +1. Navigate to the correct directory for your new module: ``$ cd lib/ansible/modules/``. If you are developing a module in a collection, ``$ cd plugins/modules/`` inside your collection development tree. +2. Create your new module file: ``$ touch my_test_info.py``. +3. Paste the content below into your new info module file. It includes the :ref:`required Ansible format and documentation `, a simple :ref:`argument spec for declaring the module options `, and some example code. +4. Modify and extend the code to do what you want your new info module to do. 
See the :ref:`programming tips ` and :ref:`Python 3 compatibility ` pages for pointers on writing clean and concise module code. + +.. literalinclude:: ../../../../examples/scripts/my_test_info.py + :language: python + +Use the same process to create a facts module. + +.. literalinclude:: ../../../../examples/scripts/my_test_facts.py + :language: python + +Creating a module +================= + +To create a new module: + +1. Navigate to the correct directory for your new module: ``$ cd lib/ansible/modules/``. If you are developing a module in a :ref:`collection `, ``$ cd plugins/modules/`` inside your collection development tree. +2. Create your new module file: ``$ touch my_test.py``. +3. Paste the content below into your new module file. It includes the :ref:`required Ansible format and documentation `, a simple :ref:`argument spec for declaring the module options `, and some example code. +4. Modify and extend the code to do what you want your new module to do. See the :ref:`programming tips ` and :ref:`Python 3 compatibility ` pages for pointers on writing clean and concise module code. + +.. literalinclude:: ../../../../examples/scripts/my_test.py + :language: python + +Exercising your module code +=========================== + +After you modify the sample code above to do what you want, you can try out your module. +Our :ref:`debugging tips ` will help if you run into bugs as you verify your module code. + + +Exercising module code locally +------------------------------ + +If your module does not need to target a remote host, you can quickly and easily exercise your code locally like this: + +- Create an arguments file, a basic JSON config file that passes parameters to your module so you can run it. Name the arguments file ``/tmp/args.json`` and add the following content: + +.. 
code:: json + + { + "ANSIBLE_MODULE_ARGS": { + "name": "hello", + "new": true + } + } + +- If you are using a virtual environment (highly recommended for + development) activate it: ``$ . venv/bin/activate`` +- Setup the environment for development: ``$ . hacking/env-setup`` +- Run your test module locally and directly: + ``$ python -m ansible.modules.my_test /tmp/args.json`` + +This should return output like this: + +.. code:: json + + {"changed": true, "state": {"original_message": "hello", "new_message": "goodbye"}, "invocation": {"module_args": {"name": "hello", "new": true}}} + + +Exercising module code in a playbook +------------------------------------ + +The next step in testing your new module is to consume it with an Ansible playbook. + +- Create a playbook in any directory: ``$ touch testmod.yml`` +- Add the following to the new playbook file:: + + - name: test my new module + hosts: localhost + tasks: + - name: run the new module + my_test: + name: 'hello' + new: true + register: testout + - name: dump test output + debug: + msg: '{{ testout }}' + +- Run the playbook and analyze the output: ``$ ansible-playbook ./testmod.yml`` + +Testing basics +==================== + +These two examples will get you started with testing your module code. Please review our :ref:`testing ` section for more detailed +information, including instructions for :ref:`testing module documentation `, adding :ref:`integration tests `, and more. + +.. note:: + Every new module and plugin should have integration tests, even if the tests cannot be run on Ansible CI infrastructure. + In this case, the tests should be marked with the ``unsupported`` alias in `aliases file `_. + +Performing sanity tests +----------------------- + +You can run through Ansible's sanity checks in a container: + +``$ ansible-test sanity -v --docker --python 2.7 MODULE_NAME`` + +.. note:: + Note that this example requires Docker to be installed and running. 
If you'd rather not use a container for this, you can choose to use ``--venv`` instead of ``--docker``. + +Unit tests +---------- + +You can add unit tests for your module in ``./test/units/modules``. You must first set up your testing environment. In this example, we're using Python 3.5. + +- Install the requirements (outside of your virtual environment): ``$ pip3 install -r ./test/lib/ansible_test/_data/requirements/units.txt`` +- Run ``. hacking/env-setup`` +- To run all tests do the following: ``$ ansible-test units --python 3.5``. If you are using a CI environment, these tests will run automatically. + +.. note:: Ansible uses pytest for unit testing. + +To run pytest against a single test module, you can do the following (provide the path to the test module appropriately): + +``$ pytest -r a --cov=. --cov-report=html --fulltrace --color yes +test/units/modules/.../test/my_test.py`` + +Contributing back to Ansible +============================ + +If you would like to contribute to ``ansible-base`` by adding a new feature or fixing a bug, `create a fork `_ of the ansible/ansible repository and develop against a new feature branch using the ``devel`` branch as a starting point. When you have a good working code change, you can submit a pull request to the Ansible repository by selecting your feature branch as a source and the Ansible devel branch as a target. + +If you want to contribute a module to an :ref:`Ansible collection `, review our :ref:`submission checklist `, :ref:`programming tips `, and :ref:`strategy for maintaining Python 2 and Python 3 compatibility `, as well as information about :ref:`testing ` before you open a pull request. + +The :ref:`Community Guide ` covers how to open a pull request and what happens next. + + +Communication and development support +===================================== + +Join the IRC channel ``#ansible-devel`` on freenode for discussions +surrounding Ansible development. 
+ +For questions and discussions pertaining to using the Ansible product, +use the ``#ansible`` channel. + +For more specific IRC channels look at :ref:`Community Guide, Communicating `. + +Credit +====== + +Thank you to Thomas Stringer (`@trstringer `_) for contributing source +material for this topic. diff --git a/docs/docsite/rst/dev_guide/developing_modules_general_aci.rst b/docs/docsite/rst/dev_guide/developing_modules_general_aci.rst new file mode 100644 index 00000000..97ee2b42 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_modules_general_aci.rst @@ -0,0 +1,443 @@ +.. _aci_dev_guide: + +**************************** +Developing Cisco ACI modules +**************************** +This is a brief walk-through of how to create new Cisco ACI modules for Ansible. + +For more information about Cisco ACI, look at the :ref:`Cisco ACI user guide `. + +What's covered in this section: + +.. contents:: + :depth: 3 + :local: + + +.. _aci_dev_guide_intro: + +Introduction +============ +The `cisco.aci collection `_ already includes a large number of Cisco ACI modules, however the ACI object model is huge and covering all possible functionality would easily cover more than 1500 individual modules. + +If you need specific functionality, you have 2 options: + +- Learn the ACI object model and use the low-level APIC REST API using the :ref:`aci_rest ` module +- Write your own dedicated modules, which is actually quite easy + +.. seealso:: + + `ACI Fundamentals: ACI Policy Model `_ + A good introduction to the ACI object model. + `APIC Management Information Model reference `_ + Complete reference of the APIC object model. + `APIC REST API Configuration Guide `_ + Detailed guide on how the APIC REST API is designed and used, incl. many examples. + + +So let's look at how a typical ACI module is built up. + + +.. 
_aci_dev_guide_module_structure: + +ACI module structure +==================== + +Importing objects from Python libraries +--------------------------------------- +The following imports are standard across ACI modules: + +.. code-block:: python + + from ansible.module_utils.aci import ACIModule, aci_argument_spec + from ansible.module_utils.basic import AnsibleModule + + +Defining the argument spec +-------------------------- +The first line adds the standard connection parameters to the module. After that, the next section will update the ``argument_spec`` dictionary with module-specific parameters. The module-specific parameters should include: + +* the object_id (usually the name) +* the configurable properties of the object +* the parent object IDs (all parents up to the root) +* only child classes that are a 1-to-1 relationship (1-to-many/many-to-many require their own module to properly manage) +* the state + + + ``state: absent`` to ensure object does not exist + + ``state: present`` to ensure the object and configs exist; this is also the default + + ``state: query`` to retrieve information about objects in the class + +.. code-block:: python + + def main(): + argument_spec = aci_argument_spec() + argument_spec.update( + object_id=dict(type='str', aliases=['name']), + object_prop1=dict(type='str'), + object_prop2=dict(type='str', choices=['choice1', 'choice2', 'choice3']), + object_prop3=dict(type='int'), + parent_id=dict(type='str'), + child_object_id=dict(type='str'), + child_object_prop=dict(type='str'), + state=dict(type='str', default='present', choices=['absent', 'present', 'query']), + ) + + +.. hint:: Do not provide default values for configuration arguments. Default values could cause unintended changes to the object. + +Using the AnsibleModule object +------------------------------ +The following section creates an AnsibleModule instance. The module should support check-mode, so we pass the ``argument_spec`` and ``supports_check_mode`` arguments. 
Since these modules support querying the APIC for all objects of the module's class, the object/parent IDs should only be required if ``state: absent`` or ``state: present``. + +.. code-block:: python + + module = AnsibleModule( + argument_spec=argument_spec, + supports_check_mode=True, + required_if=[ + ['state', 'absent', ['object_id', 'parent_id']], + ['state', 'present', ['object_id', 'parent_id']], + ], + ) + + +Mapping variable definition +--------------------------- +Once the AnsibleModule object has been initiated, the necessary parameter values should be extracted from ``params`` and any data validation should be done. Usually the only params that need to be extracted are those related to the ACI object configuration and its child configuration. If you have integer objects that you would like to validate, then the validation should be done here, and the ``ACIModule.payload()`` method will handle the string conversion. + +.. code-block:: python + + object_id = module.params['object_id'] + object_prop1 = module.params['object_prop1'] + object_prop2 = module.params['object_prop2'] + object_prop3 = module.params['object_prop3'] + if object_prop3 is not None and object_prop3 not in range(x, y): + module.fail_json(msg='Valid object_prop3 values are between x and (y-1)') + child_object_id = module.params['child_object_id'] + child_object_prop = module.params['child_object_prop'] + state = module.params['state'] + + +Using the ACIModule object +-------------------------- +The ACIModule class handles most of the logic for the ACI modules. The ACIModule extends functionality to the AnsibleModule object, so the module instance must be passed into the class instantiation. + +.. code-block:: python + + aci = ACIModule(module) + +The ACIModule has six main methods that are used by the modules: + +* construct_url +* get_existing +* payload +* get_diff +* post_config +* delete_config + +The first two methods are used regardless of what value is passed to the ``state`` parameter. 
+ +Constructing URLs +^^^^^^^^^^^^^^^^^ +The ``construct_url()`` method is used to dynamically build the appropriate URL to interact with the object, and the appropriate filter string that should be appended to the URL to filter the results. + +* When the ``state`` is not ``query``, the URL is the base URL to access the APIC plus the distinguished name to access the object. The filter string will restrict the returned data to just the configuration data. +* When ``state`` is ``query``, the URL and filter string used depends on what parameters are passed to the object. This method handles the complexity so that it is easier to add new modules and so that all modules are consistent in what type of data is returned. + +.. note:: Our design goal is to take all ID parameters that have values, and return the most specific data possible. If you do not supply any ID parameters to the task, then all objects of the class will be returned. If all of the ID parameters are passed, then the data for the specific object is returned. If a partial set of ID parameters are passed, then the module will use the IDs that are passed to build the URL and filter strings appropriately. + +The ``construct_url()`` method takes 2 required arguments: + +* **self** - passed automatically with the class instance +* **root_class** - A dictionary consisting of ``aci_class``, ``aci_rn``, ``target_filter``, and ``module_object`` keys + + + **aci_class**: The name of the class used by the APIC, for example ``fvTenant`` + + + **aci_rn**: The relative name of the object, for example ``tn-ACME`` + + + **target_filter**: A dictionary with key-value pairs that make up the query string for selecting a subset of entries, for example ``{'name': 'ACME'}`` + + + **module_object**: The particular object for this class, for example ``ACME`` + +Example: + +.. 
code-block:: python + + aci.construct_url( + root_class=dict( + aci_class='fvTenant', + aci_rn='tn-{0}'.format(tenant), + target_filter={'name': tenant}, + module_object=tenant, + ), + ) + +Some modules, like ``aci_tenant``, are the root class and so they would not need to pass any additional arguments to the method. + +The ``construct_url()`` method takes 4 optional arguments, the first three imitate the root class as described above, but are for child objects: + +* subclass_1 - A dictionary consisting of ``aci_class``, ``aci_rn``, ``target_filter``, and ``module_object`` keys + + + Example: Application Profile Class (AP) + +* subclass_2 - A dictionary consisting of ``aci_class``, ``aci_rn``, ``target_filter``, and ``module_object`` keys + + + Example: End Point Group (EPG) + +* subclass_3 - A dictionary consisting of ``aci_class``, ``aci_rn``, ``target_filter``, and ``module_object`` keys + + + Example: Binding a Contract to an EPG + +* child_classes - The list of APIC names for the child classes supported by the modules. + + + This is a list, even if it is a list of one + + These are the unfriendly names used by the APIC + + These are used to limit the returned child_classes when possible + + Example: ``child_classes=['fvRsBDSubnetToProfile', 'fvRsNdPfxPol']`` + +.. note:: Sometimes the APIC will require special characters ([, ], and -) or will use object metadata in the name ("vlanns" for VLAN pools); the module should handle adding special characters or joining of multiple parameters in order to keep expected inputs simple. + +Getting the existing configuration +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Once the URL and filter string have been built, the module is ready to retrieve the existing configuration for the object: + +* ``state: present`` retrieves the configuration to use as a comparison against what was entered in the task. All values that are different than the existing values will be updated. 
+* ``state: absent`` uses the existing configuration to see if the item exists and needs to be deleted. +* ``state: query`` uses this to perform the query for the task and report back the existing data. + +.. code-block:: python + + aci.get_existing() + + +When state is present +^^^^^^^^^^^^^^^^^^^^^ +When ``state: present``, the module needs to perform a diff against the existing configuration and the task entries. If any value needs to be updated, then the module will make a POST request with only the items that need to be updated. Some modules have children that are in a 1-to-1 relationship with another object; for these cases, the module can be used to manage the child objects. + +Building the ACI payload +"""""""""""""""""""""""" +The ``aci.payload()`` method is used to build a dictionary of the proposed object configuration. All parameters that were not provided a value in the task will be removed from the dictionary (both for the object and its children). Any parameter that does have a value will be converted to a string and added to the final dictionary object that will be used for comparison against the existing configuration. + +The ``aci.payload()`` method takes two required arguments and one optional argument, depending on whether the module manages child objects. + +* ``aci_class`` is the APIC name for the object's class, for example ``aci_class='fvBD'`` +* ``class_config`` is the appropriate dictionary to be used as the payload for the POST request + + + The keys should match the names used by the APIC. + + The values should be the corresponding value in ``module.params``; these are the variables defined above + +* ``child_configs`` is optional, and is a list of child config dictionaries. + + + The child configs include the full child object dictionary, not just the attributes configuration portion. + + The configuration portion is built the same way as the object. + +.. 
code-block:: python + + aci.payload( + aci_class=aci_class, + class_config=dict( + name=bd, + descr=description, + type=bd_type, + ), + child_configs=[ + dict( + fvRsCtx=dict( + attributes=dict( + tnFvCtxName=vrf + ), + ), + ), + ], + ) + + +Performing the request +"""""""""""""""""""""" +The ``get_diff()`` method is used to perform the diff, and takes only one required argument, ``aci_class``. +Example: ``aci.get_diff(aci_class='fvBD')`` + +The ``post_config()`` method is used to make the POST request to the APIC if needed. This method doesn't take any arguments and handles check mode. +Example: ``aci.post_config()`` + + +Example code +"""""""""""" +.. code-block:: text + + if state == 'present': + aci.payload( + aci_class='', + class_config=dict( + name=object_id, + prop1=object_prop1, + prop2=object_prop2, + prop3=object_prop3, + ), + child_configs=[ + dict( + ''=dict( + attributes=dict( + child_key=child_object_id, + child_prop=child_object_prop + ), + ), + ), + ], + ) + + aci.get_diff(aci_class='') + + aci.post_config() + + +When state is absent +^^^^^^^^^^^^^^^^^^^^ +If the task sets the state to absent, then the ``delete_config()`` method is all that is needed. This method does not take any arguments, and handles check mode. + +.. code-block:: text + + elif state == 'absent': + aci.delete_config() + + +Exiting the module +^^^^^^^^^^^^^^^^^^ +To have the module exit, call the ACIModule method ``exit_json()``. This method automatically takes care of returning the common return values for you. + +.. code-block:: text + + aci.exit_json() + + if __name__ == '__main__': + main() + + +.. _aci_dev_guide_testing: + +Testing ACI library functions +============================= +You can test your ``construct_url()`` and ``payload()`` arguments without accessing APIC hardware by using the following python script: + +.. 
code-block:: text + + #!/usr/bin/python + import json + from ansible.module_utils.network.aci.aci import ACIModule + + # Just another class mimicing a bare AnsibleModule class for construct_url() and payload() methods + class AltModule(): + params = dict( + host='dummy', + port=123, + protocol='https', + state='present', + output_level='debug', + ) + + # A sub-class of ACIModule to overload __init__ (we don't need to log into APIC) + class AltACIModule(ACIModule): + def __init__(self): + self.result = dict(changed=False) + self.module = AltModule() + self.params = self.module.params + + # Instantiate our version of the ACI module + aci = AltACIModule() + + # Define the variables you need below + aep = 'AEP' + aep_domain = 'uni/phys-DOMAIN' + + # Below test the construct_url() arguments to see if it produced correct results + aci.construct_url( + root_class=dict( + aci_class='infraAttEntityP', + aci_rn='infra/attentp-{}'.format(aep), + target_filter={'name': aep}, + module_object=aep, + ), + subclass_1=dict( + aci_class='infraRsDomP', + aci_rn='rsdomP-[{}]'.format(aep_domain), + target_filter={'tDn': aep_domain}, + module_object=aep_domain, + ), + ) + + # Below test the payload arguments to see if it produced correct results + aci.payload( + aci_class='infraRsDomP', + class_config=dict(tDn=aep_domain), + ) + + # Print the URL and proposed payload + print 'URL:', json.dumps(aci.url, indent=4) + print 'PAYLOAD:', json.dumps(aci.proposed, indent=4) + + +This will result in: + +.. code-block:: yaml + + URL: "https://dummy/api/mo/uni/infra/attentp-AEP/rsdomP-[phys-DOMAIN].json" + PAYLOAD: { + "infraRsDomP": { + "attributes": { + "tDn": "phys-DOMAIN" + } + } + } + +Testing for sanity checks +------------------------- +You can run from your fork something like: + +.. code-block:: bash + + $ ansible-test sanity --python 2.7 lib/ansible/modules/network/aci/aci_tenant.py + +.. seealso:: + + :ref:`testing_sanity` + Information on how to build sanity tests. 
+ + +Testing ACI integration tests +----------------------------- +You can run this: + +.. code-block:: bash + + $ ansible-test network-integration --continue-on-error --allow-unsupported --diff -v aci_tenant + +.. note:: You may need to add ``--python 2.7`` or ``--python 3.6`` in order to use the correct python version for performing tests. + +You may want to edit the used inventory at *test/integration/inventory.networking* and add something like: + +.. code-block:: ini + + [aci:vars] + aci_hostname=my-apic-1 + aci_username=admin + aci_password=my-password + aci_use_ssl=yes + aci_use_proxy=no + + [aci] + localhost ansible_ssh_host=127.0.0.1 ansible_connection=local + +.. seealso:: + + :ref:`testing_integration` + Information on how to build integration tests. + + +Testing for test coverage +------------------------- +You can run this: + +.. code-block:: bash + + $ ansible-test network-integration --python 2.7 --allow-unsupported --coverage aci_tenant + $ ansible-test coverage report diff --git a/docs/docsite/rst/dev_guide/developing_modules_general_windows.rst b/docs/docsite/rst/dev_guide/developing_modules_general_windows.rst new file mode 100644 index 00000000..3dd66c2e --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_modules_general_windows.rst @@ -0,0 +1,696 @@ +.. _developing_modules_general_windows: + +************************************** +Windows module development walkthrough +************************************** + +In this section, we will walk through developing, testing, and debugging an +Ansible Windows module. + +Because Windows modules are written in Powershell and need to be run on a +Windows host, this guide differs from the usual development walkthrough guide. + +What's covered in this section: + +.. contents:: + :local: + + +Windows environment setup +========================= + +Unlike Python module development which can be run on the host that runs +Ansible, Windows modules need to be written and tested for Windows hosts. 
+While evaluation editions of Windows can be downloaded from
+Microsoft, these images are usually not ready to be used by Ansible without
+further modification. The easiest way to set up a Windows host so that it is
+ready to be used by Ansible is to set up a virtual machine using Vagrant.
+Vagrant can be used to download existing OS images called *boxes* that are then
+deployed to a hypervisor like VirtualBox. These boxes can either be created and
+stored offline or they can be downloaded from a central repository called
+Vagrant Cloud.
+
+This guide will use the Vagrant boxes created by the `packer-windoze `_
+repository which have also been uploaded to `Vagrant Cloud `_.
+To find out more info on how these images are created, please go to the GitHub
+repo and look at the ``README`` file.
+
+Before you can get started, the following programs must be installed (please consult the Vagrant and
+VirtualBox documentation for installation instructions):
+
+- Vagrant
+- VirtualBox
+
+Create a Windows server in a VM
+===============================
+
+To create a single Windows Server 2016 instance, run the following:
+
+.. code-block:: shell
+
+    vagrant init jborean93/WindowsServer2016
+    vagrant up
+
+This will download the Vagrant box from Vagrant Cloud and add it to the local
+boxes on your host and then start up that instance in VirtualBox. When starting
+for the first time, the Windows VM will run through the sysprep process and
+then create a HTTP and HTTPS WinRM listener automatically. Vagrant will finish
+its process once the listeners are online, after which the VM can be used by Ansible.
+
+Create an Ansible inventory
+===========================
+
+The following Ansible inventory file can be used to connect to the newly
+created Windows VM:
+
+.. 
code-block:: ini

+    [windows]
+    WindowsServer ansible_host=127.0.0.1
+
+    [windows:vars]
+    ansible_user=vagrant
+    ansible_password=vagrant
+    ansible_port=55986
+    ansible_connection=winrm
+    ansible_winrm_transport=ntlm
+    ansible_winrm_server_cert_validation=ignore
+
+.. note:: The port ``55986`` is automatically forwarded by Vagrant to the
+    Windows host that was created, if this conflicts with an existing local
+    port then Vagrant will automatically use another one at random and display
+    that in the output.
+
+The OS that is created is based on the image set. The following
+images can be used:
+
+- `jborean93/WindowsServer2008-x86 `_
+- `jborean93/WindowsServer2008-x64 `_
+- `jborean93/WindowsServer2008R2 `_
+- `jborean93/WindowsServer2012 `_
+- `jborean93/WindowsServer2012R2 `_
+- `jborean93/WindowsServer2016 `_
+
+When the host is online, it can be accessed by RDP on ``127.0.0.1:3389`` but the
+port may differ depending on whether there was a conflict. To get rid of the host, run
+``vagrant destroy --force`` and Vagrant will automatically remove the VM and
+any other files associated with that VM.
+
+While this is useful when testing modules on a single Windows instance, these
+hosts won't work without modification with domain based modules. The Vagrantfile
+at `ansible-windows `_
+can be used to create a test domain environment to be used in Ansible. This
+repo contains three files which are used by both Ansible and Vagrant to create
+multiple Windows hosts in a domain environment. 
These files are: + +- ``Vagrantfile``: The Vagrant file that reads the inventory setup of ``inventory.yml`` and provisions the hosts that are required +- ``inventory.yml``: Contains the hosts that are required and other connection information such as IP addresses and forwarded ports +- ``main.yml``: Ansible playbook called by Vagrant to provision the domain controller and join the child hosts to the domain + +By default, these files will create the following environment: + +- A single domain controller running on Windows Server 2016 +- Five child hosts for each major Windows Server version joined to that domain +- A domain with the DNS name ``domain.local`` +- A local administrator account on each host with the username ``vagrant`` and password ``vagrant`` +- A domain admin account ``vagrant-domain@domain.local`` with the password ``VagrantPass1`` + +The domain name and accounts can be modified by changing the variables +``domain_*`` in the ``inventory.yml`` file if it is required. The inventory +file can also be modified to provision more or less servers by changing the +hosts that are defined under the ``domain_children`` key. The host variable +``ansible_host`` is the private IP that will be assigned to the VirtualBox host +only network adapter while ``vagrant_box`` is the box that will be used to +create the VM. + +Provisioning the environment +============================ + +To provision the environment as is, run the following: + +.. code-block:: shell + + git clone https://github.com/jborean93/ansible-windows.git + cd vagrant + vagrant up + +.. note:: Vagrant provisions each host sequentially so this can take some time + to complete. If any errors occur during the Ansible phase of setting up the + domain, run ``vagrant provision`` to rerun just that step. + +Unlike setting up a single Windows instance with Vagrant, these hosts can also +be accessed using the IP address directly as well as through the forwarded +ports. 
It is easier to access it over the host only network adapter as the
+normal protocol ports are used, for example RDP is still over ``3389``. In cases where
+the host cannot be resolved using the host only network IP, the following
+protocols can be accessed over ``127.0.0.1`` using these forwarded ports:
+
+- ``RDP``: 295xx
+- ``SSH``: 296xx
+- ``WinRM HTTP``: 297xx
+- ``WinRM HTTPS``: 298xx
+- ``SMB``: 299xx
+
+Replace ``xx`` with the entry number in the inventory file where the domain
+controller started with ``00`` and is incremented from there. For example, in
+the default ``inventory.yml`` file, WinRM over HTTPS for ``SERVER2012R2`` is
+forwarded over port ``29804`` as it's the fourth entry in ``domain_children``.
+
+.. note:: While an SSH server is available on all Windows hosts but Server
+    2008 (non R2), it is not a supported connection for Ansible managing Windows
+    hosts and should not be used with Ansible.
+
+Windows new module development
+==============================
+
+When creating a new module there are a few things to keep in mind:
+
+- Module code is in Powershell (.ps1) files while the documentation is contained in Python (.py) files of the same name
+- Avoid using ``Write-Host/Debug/Verbose/Error`` in the module and add what needs to be returned to the ``$module.Result`` variable
+- To fail a module, call ``$module.FailJson("failure message here")``, an Exception or ErrorRecord can be set to the second argument for a more descriptive error message
+- You can pass in the exception or ErrorRecord as a second argument to ``FailJson("failure", $_)`` to get a more detailed output
+- Most new modules require check mode and integration tests before they are merged into the main Ansible codebase
+- Avoid using try/catch statements over a large code block, rather use them for individual calls so the error message can be more descriptive
+- Try and catch specific exceptions when using try/catch statements
+- Avoid using PSCustomObjects unless necessary
+- 
Look for common functions in ``./lib/ansible/module_utils/powershell/`` and use the code there instead of duplicating work. These can be imported by adding the line ``#Requires -Module *`` where * is the filename to import, and will be automatically included with the module code sent to the Windows target when run via Ansible +- As well as PowerShell module utils, C# module utils are stored in ``./lib/ansible/module_utils/csharp/`` and are automatically imported in a module execution if the line ``#AnsibleRequires -CSharpUtil *`` is present +- C# and PowerShell module utils achieve the same goal but C# allows a developer to implement low level tasks, such as calling the Win32 API, and can be faster in some cases +- Ensure the code runs under Powershell v3 and higher on Windows Server 2008 and higher; if higher minimum Powershell or OS versions are required, ensure the documentation reflects this clearly +- Ansible runs modules under strictmode version 2.0. Be sure to test with that enabled by putting ``Set-StrictMode -Version 2.0`` at the top of your dev script +- Favor native Powershell cmdlets over executable calls if possible +- Use the full cmdlet name instead of aliases, for example ``Remove-Item`` over ``rm`` +- Use named parameters with cmdlets, for example ``Remove-Item -Path C:\temp`` over ``Remove-Item C:\temp`` + +A very basic Powershell module `win_environment `_ incorporates best practices for Powershell modules. It demonstrates how to implement check-mode and diff-support, and also shows a warning to the user when a specific condition is met. + +A slightly more advanced module is `win_uri `_ which additionally shows how to use different parameter types (bool, str, int, list, dict, path) and a selection of choices for parameters, how to fail a module and how to handle exceptions. + +As part of the new ``AnsibleModule`` wrapper, the input parameters are defined and validated based on an argument +spec. 
The following options can be set at the root level of the argument spec: + +- ``mutually_exclusive``: A list of lists, where the inner list contains module options that cannot be set together +- ``no_log``: Stops the module from emitting any logs to the Windows Event log +- ``options``: A dictionary where the key is the module option and the value is the spec for that option +- ``required_by``: A dictionary where the option(s) specified by the value must be set if the option specified by the key is also set +- ``required_if``: A list of lists where the inner list contains 3 or 4 elements; + * The first element is the module option to check the value against + * The second element is the value of the option specified by the first element, if matched then the required if check is run + * The third element is a list of required module options when the above is matched + * An optional fourth element is a boolean that states whether all module options in the third elements are required (default: ``$false``) or only one (``$true``) +- ``required_one_of``: A list of lists, where the inner list contains module options where at least one must be set +- ``required_together``: A list of lists, where the inner list contains module options that must be set together +- ``supports_check_mode``: Whether the module supports check mode, by default this is ``$false`` + +The actual input options for a module are set within the ``options`` value as a dictionary. The keys of this dictionary +are the module option names while the values are the spec of that module option. 
Each spec can have the following +options set: + +- ``aliases``: A list of aliases for the module option +- ``choices``: A list of valid values for the module option, if ``type=list`` then each list value is validated against the choices and not the list itself +- ``default``: The default value for the module option if not set +- ``deprecated_aliases``: A list of hashtables that define aliases that are deprecated and the versions they will be removed in. Each entry must contain the keys ``name`` and ``collection_name`` with either ``version`` or ``date`` +- ``elements``: When ``type=list``, this sets the type of each list value, the values are the same as ``type`` +- ``no_log``: Will sanitise the input value before being returned in the ``module_invocation`` return value +- ``removed_in_version``: States when a deprecated module option is to be removed, a warning is displayed to the end user if set +- ``removed_at_date``: States the date (YYYY-MM-DD) when a deprecated module option will be removed, a warning is displayed to the end user if set +- ``removed_from_collection``: States from which collection the deprecated module option will be removed; must be specified if one of ``removed_in_version`` and ``removed_at_date`` is specified +- ``required``: Will fail when the module option is not set +- ``type``: The type of the module option, if not set then it defaults to ``str``. The valid types are; + * ``bool``: A boolean value + * ``dict``: A dictionary value, if the input is a JSON or key=value string then it is converted to dictionary + * ``float``: A float or `Single `_ value + * ``int``: An Int32 value + * ``json``: A string where the value is converted to a JSON string if the input is a dictionary + * ``list``: A list of values, ``elements=`` can convert the individual list value types if set. If ``elements=dict`` then ``options`` is defined, the values will be validated against the argument spec. 
When the input is a string then the string is split by ``,`` and any whitespace is trimmed + * ``path``: A string where values likes ``%TEMP%`` are expanded based on environment values. If the input value starts with ``\\?\`` then no expansion is run + * ``raw``: No conversions occur on the value passed in by Ansible + * ``sid``: Will convert Windows security identifier values or Windows account names to a `SecurityIdentifier `_ value + * ``str``: The value is converted to a string + +When ``type=dict``, or ``type=list`` and ``elements=dict``, the following keys can also be set for that module option: + +- ``apply_defaults``: The value is based on the ``options`` spec defaults for that key if ``True`` and null if ``False``. Only valid when the module option is not defined by the user and ``type=dict``. +- ``mutually_exclusive``: Same as the root level ``mutually_exclusive`` but validated against the values in the sub dict +- ``options``: Same as the root level ``options`` but contains the valid options for the sub option +- ``required_if``: Same as the root level ``required_if`` but validated against the values in the sub dict +- ``required_by``: Same as the root level ``required_by`` but validated against the values in the sub dict +- ``required_together``: Same as the root level ``required_together`` but validated against the values in the sub dict +- ``required_one_of``: Same as the root level ``required_one_of`` but validated against the values in the sub dict + +A module type can also be a delegate function that converts the value to whatever is required by the module option. For +example the following snippet shows how to create a custom type that creates a ``UInt64`` value: + +.. code-block:: powershell + + $spec = @{ + uint64_type = @{ type = [Func[[Object], [UInt64]]]{ [System.UInt64]::Parse($args[0]) } } + } + $uint64_type = $module.Params.uint64_type + +When in doubt, look at some of the other core modules and see how things have been +implemented there. 
+ +Sometimes there are multiple ways that Windows offers to complete a task; this +is the order to favor when writing modules: + +- Native Powershell cmdlets like ``Remove-Item -Path C:\temp -Recurse`` +- .NET classes like ``[System.IO.Path]::GetRandomFileName()`` +- WMI objects through the ``New-CimInstance`` cmdlet +- COM objects through ``New-Object -ComObject`` cmdlet +- Calls to native executables like ``Secedit.exe`` + +PowerShell modules support a small subset of the ``#Requires`` options built +into PowerShell as well as some Ansible-specific requirements specified by +``#AnsibleRequires``. These statements can be placed at any point in the script, +but are most commonly near the top. They are used to make it easier to state the +requirements of the module without writing any of the checks. Each ``requires`` +statement must be on its own line, but there can be multiple requires statements +in one script. + +These are the checks that can be used within Ansible modules: + +- ``#Requires -Module Ansible.ModuleUtils.``: Added in Ansible 2.4, specifies a module_util to load in for the module execution. +- ``#Requires -Version x.y``: Added in Ansible 2.5, specifies the version of PowerShell that is required by the module. The module will fail if this requirement is not met. +- ``#AnsibleRequires -OSVersion x.y``: Added in Ansible 2.5, specifies the OS build version that is required by the module and will fail if this requirement is not met. The actual OS version is derived from ``[Environment]::OSVersion.Version``. +- ``#AnsibleRequires -Become``: Added in Ansible 2.5, forces the exec runner to run the module with ``become``, which is primarily used to bypass WinRM restrictions. If ``ansible_become_user`` is not specified then the ``SYSTEM`` account is used instead. +- ``#AnsibleRequires -CSharpUtil Ansible.``: Added in Ansible 2.8, specifies a C# module_util to load in for the module execution. 
+ +C# module utils can reference other C# utils by adding the line +``using Ansible.;`` to the top of the script with all the other +using statements. + + +Windows module utilities +======================== + +Like Python modules, PowerShell modules also provide a number of module +utilities that provide helper functions within PowerShell. These module_utils +can be imported by adding the following line to a PowerShell module: + +.. code-block:: powershell + + #Requires -Module Ansible.ModuleUtils.Legacy + +This will import the module_util at ``./lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1`` +and enable calling all of its functions. As of Ansible 2.8, Windows module +utils can also be written in C# and stored at ``lib/ansible/module_utils/csharp``. +These module_utils can be imported by adding the following line to a PowerShell +module: + +.. code-block:: powershell + + #AnsibleRequires -CSharpUtil Ansible.Basic + +This will import the module_util at ``./lib/ansible/module_utils/csharp/Ansible.Basic.cs`` +and automatically load the types in the executing process. C# module utils can +reference each other and be loaded together by adding the following line to the +using statements at the top of the util: + +.. code-block:: csharp + + using Ansible.Become; + +There are special comments that can be set in a C# file for controlling the +compilation parameters. The following comments can be added to the script; + +- ``//AssemblyReference -Name [-CLR [Core|Framework]]``: The assembly DLL to reference during compilation, the optional ``-CLR`` flag can also be used to state whether to reference when running under .NET Core, Framework, or both (if omitted) +- ``//NoWarn -Name [-CLR [Core|Framework]]``: A compiler warning ID to ignore when compiling the code, the optional ``-CLR`` works the same as above. 
A list of warnings can be found at `Compiler errors `_
+
+As well as this, the following pre-processor symbols are defined;
+
+- ``CORECLR``: This symbol is present when PowerShell is running through .NET Core
+- ``WINDOWS``: This symbol is present when PowerShell is running on Windows
+- ``UNIX``: This symbol is present when PowerShell is running on Unix
+
+A combination of these flags help to make a module util interoperable on both
+.NET Framework and .NET Core, here is an example of them in action:
+
+.. code-block:: csharp
+
+    #if CORECLR
+    using Newtonsoft.Json;
+    #else
+    using System.Web.Script.Serialization;
+    #endif
+
+    //AssemblyReference -Name Newtonsoft.Json.dll -CLR Core
+    //AssemblyReference -Name System.Web.Extensions.dll -CLR Framework
+
+    // Ignore error CS1702 for all .NET types
+    //NoWarn -Name CS1702
+
+    // Ignore error CS1956 only for .NET Framework
+    //NoWarn -Name CS1956 -CLR Framework
+
+
+The following is a list of module_utils that are packaged with Ansible and a general description of what
+they do:
+
+- ArgvParser: Utility used to convert a list of arguments to an escaped string compliant with the Windows argument parsing rules.
+- CamelConversion: Utility used to convert camelCase strings/lists/dicts to snake_case.
+- CommandUtil: Utility used to execute a Windows process and return the stdout/stderr and rc as separate objects.
+- FileUtil: Utility that expands on the ``Get-ChildItem`` and ``Test-Path`` to work with special files like ``C:\pagefile.sys``.
+- Legacy: General definitions and helper utilities for Ansible modules.
+- LinkUtil: Utility to create, remove, and get information about symbolic links, junction points and hard links.
+- SID: Utilities used to convert a user or group to a Windows SID and vice versa.
+
+For more details on any specific module utility and their requirements, please see the `Ansible
+module utilities source code `_. 
+
+PowerShell module utilities can be stored outside of the standard Ansible
+distribution for use with custom modules. Custom module_utils are placed in a
+folder called ``module_utils`` located in the root folder of the playbook or role
+directory.
+
+C# module utilities can also be stored outside of the standard Ansible distribution for use with custom modules. Like
+PowerShell utils, these are stored in a folder called ``module_utils`` and the filename must end in the extension
+``.cs``, start with ``Ansible.`` and be named after the namespace defined in the util.
+
+The below example is a role structure that contains two PowerShell custom module_utils called
+``Ansible.ModuleUtils.ModuleUtil1``, ``Ansible.ModuleUtils.ModuleUtil2``, and a C# util containing the namespace
+``Ansible.CustomUtil``::
+
+    meta/
+      main.yml
+    defaults/
+      main.yml
+    module_utils/
+      Ansible.ModuleUtils.ModuleUtil1.psm1
+      Ansible.ModuleUtils.ModuleUtil2.psm1
+      Ansible.CustomUtil.cs
+    tasks/
+      main.yml
+
+Each PowerShell module_util must contain at least one function that has been exported with ``Export-ModuleMember``
+at the end of the file. For example
+
+.. code-block:: powershell
+
+    Export-ModuleMember -Function Invoke-CustomUtil, Get-CustomInfo
+
+
+Exposing shared module options
+++++++++++++++++++++++++++++++
+
+PowerShell module utils can easily expose common module options that a module can use when building its argument spec.
+This allows common features to be stored and maintained in one location and have those features used by multiple
+modules with minimal effort. Any new features or bugfixes added to one of these utils are then automatically used by
+the various modules that call that util. 
+
+An example of this would be to have a module util that handles authentication and communication against an API. This
+util can be used by multiple modules to expose a common set of module options like the API endpoint, username,
+password, timeout, cert validation, and so on without having to add those options to each module spec.
+
+The standard convention for a module util that has a shared argument spec would have
+
+- A ``Get-Spec`` function that outputs the common spec for a module
+  * It is highly recommended to make this function name be unique to the module to avoid any conflicts with other utils that can be loaded
+  * The format of the output spec is a Hashtable in the same format as the ``$spec`` used for normal modules
+- A function that takes in an ``AnsibleModule`` object called under the ``-Module`` parameter which it can use to get the shared options
+
+Because these options can be shared across various modules it is highly recommended to keep the module option names and
+aliases in the shared spec as specific as they can be. For example do not have a util option called ``password``,
+rather you should prefix it with a unique name like ``acme_password``.
+
+.. warning::
+    Failure to have a unique option name or alias can prevent the util being used by modules that also use those names or
+    aliases for their own options.
+
+The following is an example module util called ``ServiceAuth.psm1`` in a collection that implements a common way for
+modules to authenticate with a service.
+
+.. 
code-block:: powershell + + Invoke-MyServiceResource { + [CmdletBinding()] + param ( + [Parameter(Mandatory=$true)] + [ValidateScript({ $_.GetType().FullName -eq 'Ansible.Basic.AnsibleModule' })] + $Module, + + [Parameter(Mandatory=$true)] + [String] + $ResourceId + + [String] + $State = 'present' + ) + + # Process the common module options known to the util + $params = @{ + ServerUri = $Module.Params.my_service_url + } + if ($Module.Params.my_service_username) { + $params.Credential = Get-MyServiceCredential + } + + if ($State -eq 'absent') { + Remove-MyService @params -ResourceId $ResourceId + } else { + New-MyService @params -ResourceId $ResourceId + } + } + + Get-MyNamespaceMyCollectionServiceAuthSpec { + # Output the util spec + @{ + options = @{ + my_service_url = @{ type = 'str'; required = $true } + my_service_username = @{ type = 'str' } + my_service_password = @{ type = 'str'; no_log = $true } + } + + required_together = @( + ,@('my_service_username', 'my_service_password') + ) + } + } + + $exportMembers = @{ + Function = 'Get-MyNamespaceMyCollectionServiceAuthSpec', 'Invoke-MyServiceResource' + } + Export-ModuleMember @exportMembers + + +For a module to take advantage of this common argument spec it can be set out like + +.. code-block:: powershell + + #!powershell + + # Include the module util ServiceAuth.psm1 from the my_namespace.my_collection collection + #AnsibleRequires -PowerShell ansible_collections.my_namespace.my_collection.plugins.module_utils.ServiceAuth + + # Create the module spec like normal + $spec = @{ + options = @{ + resource_id = @{ type = 'str'; required = $true } + state = @{ type = 'str'; choices = 'absent', 'present' } + } + } + + # Create the module from the module spec but also include the util spec to merge into our own. 
+ $module = [Ansible.Basic.AnsibleModule]::Create($args, $spec, @(Get-MyNamespaceMyCollectionServiceAuthSpec)) + + # Call the ServiceAuth module util and pass in the module object so it can access the module options. + Invoke-MyServiceResource -Module $module -ResourceId $module.Params.resource_id -State $module.params.state + + $module.ExitJson() + + +.. note:: + Options defined in the module spec will always have precedence over a util spec. Any list values under the same key + in a util spec will be appended to the module spec for that same key. Dictionary values will add any keys that are + missing from the module spec and merge any values that are lists or dictionaries. This is similar to how the doc + fragment plugins work when extending module documentation. + +To document these shared util options for a module, create a doc fragment plugin that documents the options implemented +by the module util and extend the module docs for every module that implements the util to include that fragment in +its docs. + + +Windows playbook module testing +=============================== + +You can test a module with an Ansible playbook. For example: + +- Create a playbook in any directory ``touch testmodule.yml``. +- Create an inventory file in the same directory ``touch hosts``. +- Populate the inventory file with the variables required to connect to a Windows host(s). +- Add the following to the new playbook file:: + + --- + - name: test out windows module + hosts: windows + tasks: + - name: test out module + win_module: + name: test name + +- Run the playbook ``ansible-playbook -i hosts testmodule.yml`` + +This can be useful for seeing how Ansible runs with +the new module end to end. Other possible ways to test the module are +shown below. + + +Windows debugging +================= + +Debugging a module currently can only be done on a Windows host. This can be +useful when developing a new module or implementing bug fixes. 
These +are some steps that need to be followed to set this up: + +- Copy the module script to the Windows server +- Copy the folders ``./lib/ansible/module_utils/powershell`` and ``./lib/ansible/module_utils/csharp`` to the same directory as the script above +- Add an extra ``#`` to the start of any ``#Requires -Module`` lines in the module code, this is only required for any lines starting with ``#Requires -Module`` +- Add the following to the start of the module script that was copied to the server: + +.. code-block:: powershell + + # Set $ErrorActionPreference to what's set during Ansible execution + $ErrorActionPreference = "Stop" + + # Set the first argument as the path to a JSON file that contains the module args + $args = @("$($pwd.Path)\args.json") + + # Or instead of an args file, set $complex_args to the pre-processed module args + $complex_args = @{ + _ansible_check_mode = $false + _ansible_diff = $false + path = "C:\temp" + state = "present" + } + + # Import any C# utils referenced with '#AnsibleRequires -CSharpUtil' or 'using Ansible.; + # The $_csharp_utils entries should be the context of the C# util files and not the path + Import-Module -Name "$($pwd.Path)\powershell\Ansible.ModuleUtils.AddType.psm1" + $_csharp_utils = @( + [System.IO.File]::ReadAllText("$($pwd.Path)\csharp\Ansible.Basic.cs") + ) + Add-CSharpType -References $_csharp_utils -IncludeDebugInfo + + # Import any PowerShell modules referenced with '#Requires -Module` + Import-Module -Name "$($pwd.Path)\powershell\Ansible.ModuleUtils.Legacy.psm1" + + # End of the setup code and start of the module code + #!powershell + +You can add more args to ``$complex_args`` as required by the module or define the module options through a JSON file +with the structure:: + + { + "ANSIBLE_MODULE_ARGS": { + "_ansible_check_mode": false, + "_ansible_diff": false, + "path": "C:\\temp", + "state": "present" + } + } + +There are multiple IDEs that can be used to debug a Powershell script, two of +the most 
popular ones are + +- `Powershell ISE`_ +- `Visual Studio Code`_ + +.. _Powershell ISE: https://docs.microsoft.com/en-us/powershell/scripting/core-powershell/ise/how-to-debug-scripts-in-windows-powershell-ise +.. _Visual Studio Code: https://blogs.technet.microsoft.com/heyscriptingguy/2017/02/06/debugging-powershell-script-in-visual-studio-code-part-1/ + +To be able to view the arguments as passed by Ansible to the module follow +these steps. + +- Prefix the Ansible command with :envvar:`ANSIBLE_KEEP_REMOTE_FILES=1` to specify that Ansible should keep the exec files on the server. +- Log onto the Windows server using the same user account that Ansible used to execute the module. +- Navigate to ``%TEMP%\..``. It should contain a folder starting with ``ansible-tmp-``. +- Inside this folder, open the PowerShell script for the module. +- In this script is a raw JSON script under ``$json_raw`` which contains the module arguments under ``module_args``. These args can be assigned manually to the ``$complex_args`` variable that is defined on your debug script or put in the ``args.json`` file. + + +Windows unit testing +==================== + +Currently there is no mechanism to run unit tests for Powershell modules under Ansible CI. + + +Windows integration testing +=========================== + +Integration tests for Ansible modules are typically written as Ansible roles. These test +roles are located in ``./test/integration/targets``. You must first set up your testing +environment, and configure a test inventory for Ansible to connect to. + +In this example we will set up a test inventory to connect to two hosts and run the integration +tests for win_stat: + +- Run the command ``source ./hacking/env-setup`` to prepare environment. +- Create a copy of ``./test/integration/inventory.winrm.template`` and name it ``inventory.winrm``. +- Fill in entries under ``[windows]`` and set the required variables that are needed to connect to the host. 
+- :ref:`Install the required Python modules ` to support WinRM and a configured authentication method. +- To execute the integration tests, run ``ansible-test windows-integration win_stat``; you can replace ``win_stat`` with the role you want to test. + +This will execute all the tests currently defined for that role. You can set +the verbosity level using the ``-v`` argument just as you would with +ansible-playbook. + +When developing tests for a new module, it is recommended to test a scenario once in +check mode and twice not in check mode. This ensures that check mode +does not make any changes but reports a change, as well as that the second run is +idempotent and does not report changes. For example: + +.. code-block:: yaml + + - name: remove a file (check mode) + win_file: + path: C:\temp + state: absent + register: remove_file_check + check_mode: yes + + - name: get result of remove a file (check mode) + win_command: powershell.exe "if (Test-Path -Path 'C:\temp') { 'true' } else { 'false' }" + register: remove_file_actual_check + + - name: assert remove a file (check mode) + assert: + that: + - remove_file_check is changed + - remove_file_actual_check.stdout == 'true\r\n' + + - name: remove a file + win_file: + path: C:\temp + state: absent + register: remove_file + + - name: get result of remove a file + win_command: powershell.exe "if (Test-Path -Path 'C:\temp') { 'true' } else { 'false' }" + register: remove_file_actual + + - name: assert remove a file + assert: + that: + - remove_file is changed + - remove_file_actual.stdout == 'false\r\n' + + - name: remove a file (idempotent) + win_file: + path: C:\temp + state: absent + register: remove_file_again + + - name: assert remove a file (idempotent) + assert: + that: + - not remove_file_again is changed + + +Windows communication and development support +============================================= + +Join the IRC channel ``#ansible-devel`` or ``#ansible-windows`` on freenode for +discussions about 
Ansible development for Windows. + +For questions and discussions pertaining to using the Ansible product, +use the ``#ansible`` channel. diff --git a/docs/docsite/rst/dev_guide/developing_modules_in_groups.rst b/docs/docsite/rst/dev_guide/developing_modules_in_groups.rst new file mode 100644 index 00000000..31a9ec9d --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_modules_in_groups.rst @@ -0,0 +1,80 @@ +.. _developing_modules_in_groups: + +************************* +Creating a new collection +************************* + +Starting with Ansible 2.10, related modules should be developed in a collection. The Ansible core team and community compiled these module development tips and tricks to help companies developing Ansible modules for their products and users developing Ansible modules for third-party products. See :ref:`developing_collections` for a more detailed description of the collections format and additional development guidelines. + +.. contents:: + :local: + +.. include:: shared_snippets/licensing.txt + +Before you start coding +======================= + +This list of prerequisites is designed to help ensure that you develop high-quality modules that work well with ansible-base and provide a seamless user experience. + +* Read through all the pages linked off :ref:`developing_modules_general`; paying particular focus to the :ref:`developing_modules_checklist`. +* We encourage PEP 8 compliance. See :ref:`testing_pep8` for more information. +* We encourage supporting :ref:`Python 2.6+ and Python 3.5+ `. +* Look at Ansible Galaxy and review the naming conventions in your functional area (such as cloud, networking, databases). +* With great power comes great responsibility: Ansible collection maintainers have a duty to help keep content up to date and release collections they are responsible for regularly. As with all successful community projects, collection maintainers should keep a watchful eye for reported issues and contributions.
+* We strongly recommend unit and/or integration tests. Unit tests are especially valuable when external resources (such as cloud or network devices) are required. For more information see :ref:`developing_testing` and the `Testing Working Group `_. + + +Naming conventions +================== + +Fully Qualified Collection Names (FQCNs) for plugins and modules include three elements: + + * the Galaxy namespace, which generally represents the company or group + * the collection name, which generally represents the product or OS + * the plugin or module name + * always in lower case + * words separated with an underscore (``_``) character + * singular, rather than plural, for example, ``command`` not ``commands`` + +For example, ``community.mongodb.mongodb_linux`` or ``cisco.meraki.meraki_device``. + +It is convenient if the organization and repository names on GitHub (or elsewhere) match your namespace and collection names on Ansible Galaxy, but it is not required. The plugin names you select, however, are always the same in your code repository and in your collection artifact on Galaxy. + +Speak to us +=========== + +Circulating your ideas before coding helps you adopt good practices and avoid common mistakes. After reading the "Before you start coding" section you should have a reasonable idea of the structure of your modules. Write a list of your proposed plugin and/or module names, with a short description of what each one does. Circulate that list on IRC or a mailing list so the Ansible community can review your ideas for consistency and familiarity. Names and functionality that are consistent, predictable, and familiar make your collection easier to use. + +Where to get support +==================== + +Ansible has a thriving and knowledgeable community of module developers that is a great resource for getting your questions answered. 
+ +In the :ref:`ansible_community_guide` you can find how to: + +* Subscribe to the Mailing Lists - We suggest "Ansible Development List" and "Ansible Announce list" +* ``#ansible-devel`` - We have found that IRC ``#ansible-devel`` on FreeNode's IRC network works best for developers so we can have an interactive dialogue. +* IRC meetings - Join the various weekly IRC meetings `meeting schedule and agenda page `_ + +Required files +============== + +Your collection should include the following files to be usable: + +* an ``__init__.py`` file - An empty file to initialize namespace and allow Python to import the files. *Required* +* at least one plugin, for example, ``/plugins/modules/$your_first_module.py``. *Required* +* if needed, one or more ``/plugins/doc_fragments/$topic.py`` files - Code documentation, such as details regarding common arguments. *Optional* +* if needed, one or more ``/plugins/module_utils/$topic.py`` files - Code shared between more than one module, such as common arguments. *Optional* + +When you have these files ready, review the :ref:`developing_modules_checklist` again. If you are creating a new collection, you are responsible for all procedures related to your repository, including setting rules for contributions, finding reviewers, and testing and maintaining the code in your collection. + +If you need help or advice, consider joining the ``#ansible-devel`` IRC channel (see how in the "Where to get support" section). + +New to git or GitHub +==================== + +We realize this may be your first use of Git or GitHub. The following guides may be of use: + +* `How to create a fork of ansible/ansible `_ +* `How to sync (update) your fork `_ +* `How to create a Pull Request (PR) `_ diff --git a/docs/docsite/rst/dev_guide/developing_plugins.rst b/docs/docsite/rst/dev_guide/developing_plugins.rst new file mode 100644 index 00000000..e40a3281 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_plugins.rst @@ -0,0 +1,495 @@ +..
_developing_plugins: +.. _plugin_guidelines: + +****************** +Developing plugins +****************** + +.. contents:: + :local: + +Plugins augment Ansible's core functionality with logic and features that are accessible to all modules. Ansible collections include a number of handy plugins, and you can easily write your own. All plugins must: + +* be written in Python +* raise errors +* return strings in unicode +* conform to Ansible's configuration and documentation standards + +Once you've reviewed these general guidelines, you can skip to the particular type of plugin you want to develop. + +Writing plugins in Python +========================= + +You must write your plugin in Python so it can be loaded by the ``PluginLoader`` and returned as a Python object that any module can use. Since your plugin will execute on the controller, you must write it in a :ref:`compatible version of Python `. + +Raising errors +============== + +You should return errors encountered during plugin execution by raising ``AnsibleError()`` or a similar class with a message describing the error. When wrapping other exceptions into error messages, you should always use the ``to_native`` Ansible function to ensure proper string compatibility across Python versions: + +.. code-block:: python + + from ansible.module_utils._text import to_native + + try: + cause_an_exception() + except Exception as e: + raise AnsibleError('Something happened, this was original exception: %s' % to_native(e)) + +Check the different `AnsibleError objects `_ and see which one applies best to your situation. + +String encoding +=============== + +You must convert any strings returned by your plugin into Python's unicode type. Converting to unicode ensures that these strings can run through Jinja2. To convert strings: + +.. 
code-block:: python + + from ansible.module_utils._text import to_text + result_string = to_text(result_string) + +Plugin configuration & documentation standards +============================================== + +To define configurable options for your plugin, describe them in the ``DOCUMENTATION`` section of the python file. Callback and connection plugins have declared configuration requirements this way since Ansible version 2.4; most plugin types now do the same. This approach ensures that the documentation of your plugin's options will always be correct and up-to-date. To add a configurable option to your plugin, define it in this format: + +.. code-block:: yaml + + options: + option_name: + description: describe this config option + default: default value for this config option + env: + - name: NAME_OF_ENV_VAR + ini: + - section: section_of_ansible.cfg_where_this_config_option_is_defined + key: key_used_in_ansible.cfg + required: True/False + type: boolean/float/integer/list/none/path/pathlist/pathspec/string/tmppath + version_added: X.x + +To access the configuration settings in your plugin, use ``self.get_option()``. For most plugin types, the controller pre-populates the settings. If you need to populate settings explicitly, use a ``self.set_options()`` call. + +Plugins that support embedded documentation (see :ref:`ansible-doc` for the list) should include well-formed doc strings. If you inherit from a plugin, you must document the options it takes, either via a documentation fragment or as a copy. See :ref:`module_documenting` for more information on correct documentation. Thorough documentation is a good idea even if you're developing a plugin for local use. + +Developing particular plugin types +================================== + +.. _developing_actions: + +Action plugins +-------------- + +Action plugins let you integrate local processing and local data with module functionality. 
+ +To create an action plugin, create a new class with the Base(ActionBase) class as the parent: + +.. code-block:: python + + from ansible.plugins.action import ActionBase + + class ActionModule(ActionBase): + pass + +From there, execute the module using the ``_execute_module`` method to call the original module. +After successful execution of the module, you can modify the module return data. + +.. code-block:: python + + module_return = self._execute_module(module_name='', + module_args=module_args, + task_vars=task_vars, tmp=tmp) + + +For example, if you wanted to check the time difference between your Ansible controller and your target machine(s), you could write an action plugin to check the local time and compare it to the return data from Ansible's ``setup`` module: + +.. code-block:: python + + #!/usr/bin/python + # Make coding more python3-ish, this is required for contributions to Ansible + from __future__ import (absolute_import, division, print_function) + __metaclass__ = type + + from ansible.plugins.action import ActionBase + from datetime import datetime + + + class ActionModule(ActionBase): + def run(self, tmp=None, task_vars=None): + super(ActionModule, self).run(tmp, task_vars) + module_args = self._task.args.copy() + module_return = self._execute_module(module_name='setup', + module_args=module_args, + task_vars=task_vars, tmp=tmp) + ret = dict() + remote_date = None + if not module_return.get('failed'): + for key, value in module_return['ansible_facts'].items(): + if key == 'ansible_date_time': + remote_date = value['iso8601'] + + if remote_date: + remote_date_obj = datetime.strptime(remote_date, '%Y-%m-%dT%H:%M:%SZ') + time_delta = datetime.now() - remote_date_obj + ret['delta_seconds'] = time_delta.seconds + ret['delta_days'] = time_delta.days + ret['delta_microseconds'] = time_delta.microseconds + + return dict(ansible_facts=dict(ret)) + + +This code checks the time on the controller, captures the date and time for the remote machine using 
the ``setup`` module, and calculates the difference between the captured time and +the local time, returning the time delta in days, seconds and microseconds. + +For practical examples of action plugins, +see the source code for the `action plugins included with Ansible Core `_ + +.. _developing_cache_plugins: + +Cache plugins +------------- + +Cache plugins store gathered facts and data retrieved by inventory plugins. + +Import cache plugins using the cache_loader so you can use ``self.set_options()`` and ``self.get_option()``. If you import a cache plugin directly in the code base, you can only access options via ``ansible.constants``, and you break the cache plugin's ability to be used by an inventory plugin. + +.. code-block:: python + + from ansible.plugins.loader import cache_loader + [...] + plugin = cache_loader.get('custom_cache', **cache_kwargs) + +There are two base classes for cache plugins, ``BaseCacheModule`` for database-backed caches, and ``BaseCacheFileModule`` for file-backed caches. + +To create a cache plugin, start by creating a new ``CacheModule`` class with the appropriate base class. If you're creating a plugin using an ``__init__`` method you should initialize the base class with any provided args and kwargs to be compatible with inventory plugin cache options. The base class calls ``self.set_options(direct=kwargs)``. After the base class ``__init__`` method is called ``self.get_option()`` should be used to access cache options. + +New cache plugins should take the options ``_uri``, ``_prefix``, and ``_timeout`` to be consistent with existing cache plugins. + +.. 
code-block:: python + + from ansible.plugins.cache import BaseCacheModule + + class CacheModule(BaseCacheModule): + def __init__(self, *args, **kwargs): + super(CacheModule, self).__init__(*args, **kwargs) + self._connection = self.get_option('_uri') + self._prefix = self.get_option('_prefix') + self._timeout = self.get_option('_timeout') + +If you use the ``BaseCacheModule``, you must implement the methods ``get``, ``contains``, ``keys``, ``set``, ``delete``, ``flush``, and ``copy``. The ``contains`` method should return a boolean that indicates if the key exists and has not expired. Unlike file-based caches, the ``get`` method does not raise a KeyError if the cache has expired. + +If you use the ``BaseFileCacheModule``, you must implement ``_load`` and ``_dump`` methods that will be called from the base class methods ``get`` and ``set``. + +If your cache plugin stores JSON, use ``AnsibleJSONEncoder`` in the ``_dump`` or ``set`` method and ``AnsibleJSONDecoder`` in the ``_load`` or ``get`` method. + +For example cache plugins, see the source code for the `cache plugins included with Ansible Core `_. + +.. _developing_callbacks: + +Callback plugins +---------------- + +Callback plugins add new behaviors to Ansible when responding to events. By default, callback plugins control most of the output you see when running the command line programs. + +To create a callback plugin, create a new class with the Base(Callbacks) class as the parent: + +.. code-block:: python + + from ansible.plugins.callback import CallbackBase + + class CallbackModule(CallbackBase): + pass + +From there, override the specific methods from the CallbackBase that you want to provide a callback for. +For plugins intended for use with Ansible version 2.0 and later, you should only override methods that start with ``v2``. +For a complete list of methods that you can override, please see ``__init__.py`` in the +`lib/ansible/plugins/callback `_ directory. 
+ +The following is a modified example of how Ansible's timer plugin is implemented, +but with an extra option so you can see how configuration works in Ansible version 2.4 and later: + +.. code-block:: python + + # Make coding more python3-ish, this is required for contributions to Ansible + from __future__ import (absolute_import, division, print_function) + __metaclass__ = type + + # not only visible to ansible-doc, it also 'declares' the options the plugin requires and how to configure them. + DOCUMENTATION = ''' + callback: timer + callback_type: aggregate + requirements: + - whitelist in configuration + short_description: Adds time to play stats + version_added: "2.0" # for collections, use the collection version, not the Ansible version + description: + - This callback just adds total play duration to the play stats. + options: + format_string: + description: format of the string shown to user at play end + ini: + - section: callback_timer + key: format_string + env: + - name: ANSIBLE_CALLBACK_TIMER_FORMAT + default: "Playbook run took %s days, %s hours, %s minutes, %s seconds" + ''' + from datetime import datetime + + from ansible.plugins.callback import CallbackBase + + + class CallbackModule(CallbackBase): + """ + This callback module tells you how long your plays ran for. + """ + CALLBACK_VERSION = 2.0 + CALLBACK_TYPE = 'aggregate' + CALLBACK_NAME = 'namespace.collection_name.timer' + + # only needed if you ship it and don't want to enable by default + CALLBACK_NEEDS_WHITELIST = True + + def __init__(self): + + # make sure the expected objects are present, calling the base's __init__ + super(CallbackModule, self).__init__() + + # start the timer when the plugin is loaded, the first play should start a few milliseconds after. 
+ self.start_time = datetime.now() + + def _days_hours_minutes_seconds(self, runtime): + ''' internal helper method for this callback ''' + minutes = (runtime.seconds // 60) % 60 + r_seconds = runtime.seconds - (minutes * 60) + return runtime.days, runtime.seconds // 3600, minutes, r_seconds + + # this is only event we care about for display, when the play shows its summary stats; the rest are ignored by the base class + def v2_playbook_on_stats(self, stats): + end_time = datetime.now() + runtime = end_time - self.start_time + + # Shows the usage of a config option declared in the DOCUMENTATION variable. Ansible will have set it when it loads the plugin. + # Also note the use of the display object to print to screen. This is available to all callbacks, and you should use this over printing yourself + self._display.display(self._plugin_options['format_string'] % (self._days_hours_minutes_seconds(runtime))) + +Note that the ``CALLBACK_VERSION`` and ``CALLBACK_NAME`` definitions are required for properly functioning plugins for Ansible version 2.0 and later. ``CALLBACK_TYPE`` is mostly needed to distinguish 'stdout' plugins from the rest, since you can only load one plugin that writes to stdout. + +For example callback plugins, see the source code for the `callback plugins included with Ansible Core `_ + +.. _developing_connection_plugins: + +Connection plugins +------------------ + +Connection plugins allow Ansible to connect to the target hosts so it can execute tasks on them. Ansible ships with many connection plugins, but only one can be used per host at a time. The most commonly used connection plugins are the ``paramiko`` SSH, native ssh (just called ``ssh``), and ``local`` connection types. All of these can be used in playbooks and with ``/usr/bin/ansible`` to connect to remote machines. + +Ansible version 2.1 introduced the ``smart`` connection plugin. 
The ``smart`` connection type allows Ansible to automatically select either the ``paramiko`` or ``openssh`` connection plugin based on system capabilities, or the ``ssh`` connection plugin if OpenSSH supports ControlPersist. + +To create a new connection plugin (for example, to support SNMP, Message bus, or other transports), copy the format of one of the existing connection plugins and drop it into ``connection`` directory on your :ref:`local plugin path `. + +Connection plugins can support common options (such as the ``--timeout`` flag) by defining an entry in the documentation for the attribute name (in this case ``timeout``). If the common option has a non-null default, the plugin should define the same default since a different default would be ignored. + +For example connection plugins, see the source code for the `connection plugins included with Ansible Core `_. + +.. _developing_filter_plugins: + +Filter plugins +-------------- + +Filter plugins manipulate data. They are a feature of Jinja2 and are also available in Jinja2 templates used by the ``template`` module. As with all plugins, they can be easily extended, but instead of having a file for each one you can have several per file. Most of the filter plugins shipped with Ansible reside in a ``core.py``. + +Filter plugins do not use the standard configuration and documentation system described above. + +For example filter plugins, see the source code for the `filter plugins included with Ansible Core `_. + +.. _developing_inventory_plugins: + +Inventory plugins +----------------- + +Inventory plugins parse inventory sources and form an in-memory representation of the inventory. Inventory plugins were added in Ansible version 2.4. + +You can see the details for inventory plugins in the :ref:`developing_inventory` page. + +.. _developing_lookup_plugins: + +Lookup plugins +-------------- + +Lookup plugins pull in data from external data stores. 
Lookup plugins can be used within playbooks both for looping --- playbook language constructs like ``with_fileglob`` and ``with_items`` are implemented via lookup plugins --- and to return values into a variable or parameter. + +Lookup plugins are very flexible, allowing you to retrieve and return any type of data. When writing lookup plugins, always return data of a consistent type that can be easily consumed in a playbook. Avoid parameters that change the returned data type. If there is a need to return a single value sometimes and a complex dictionary other times, write two different lookup plugins. + +Ansible includes many :ref:`filters ` which can be used to manipulate the data returned by a lookup plugin. Sometimes it makes sense to do the filtering inside the lookup plugin, other times it is better to return results that can be filtered in the playbook. Keep in mind how the data will be referenced when determining the appropriate level of filtering to be done inside the lookup plugin. + +Here's a simple lookup plugin implementation --- this lookup returns the contents of a text file as a variable: + +.. code-block:: python + + # python 3 headers, required if submitting to Ansible + from __future__ import (absolute_import, division, print_function) + __metaclass__ = type + + DOCUMENTATION = """ + lookup: file + author: Daniel Hokka Zakrisson + version_added: "0.9" # for collections, use the collection version, not the Ansible version + short_description: read file contents + description: + - This lookup returns the contents from a file on the Ansible controller's file system. + options: + _terms: + description: path(s) of files to read + required: True + notes: + - if read in variable context, the file can be interpreted as YAML if the content is valid to the parser. + - this lookup does not understand globbing --- use the fileglob lookup instead.
+ """ + from ansible.errors import AnsibleError, AnsibleParserError + from ansible.plugins.lookup import LookupBase + from ansible.utils.display import Display + + display = Display() + + + class LookupModule(LookupBase): + + def run(self, terms, variables=None, **kwargs): + + + # lookups in general are expected to both take a list as input and output a list + # this is done so they work with the looping construct 'with_'. + ret = [] + for term in terms: + display.debug("File lookup term: %s" % term) + + # Find the file in the expected search path, using a class method + # that implements the 'expected' search path for Ansible plugins. + lookupfile = self.find_file_in_search_path(variables, 'files', term) + + # Don't use print or your own logging, the display class + # takes care of it in a unified way. + display.vvvv(u"File lookup using %s as file" % lookupfile) + try: + if lookupfile: + contents, show_data = self._loader._get_file_contents(lookupfile) + ret.append(contents.rstrip()) + else: + # Always use ansible error classes to throw 'final' exceptions, + # so the Ansible engine will know how to deal with them. + # The Parser error indicates invalid options passed + raise AnsibleParserError() + except AnsibleParserError: + raise AnsibleError("could not locate file in lookup: %s" % term) + + return ret + +The following is an example of how this lookup is called:: + + --- + - hosts: all + vars: + contents: "{{ lookup('namespace.collection_name.file', '/etc/foo.txt') }}" + + tasks: + + - debug: + msg: the value of foo.txt is {{ contents }} as seen today {{ lookup('pipe', 'date +"%Y-%m-%d"') }} + +For example lookup plugins, see the source code for the `lookup plugins included with Ansible Core `_. + +For more usage examples of lookup plugins, see :ref:`Using Lookups`. + +.. _developing_test_plugins: + +Test plugins +------------ + +Test plugins verify data. They are a feature of Jinja2 and are also available in Jinja2 templates used by the ``template`` module. 
As with all plugins, they can be easily extended, but instead of having a file for each one you can have several per file. Most of the test plugins shipped with Ansible reside in a ``core.py``. These are especially useful in conjunction with some filter plugins like ``map`` and ``select``; they are also available for conditional directives like ``when:``. + +Test plugins do not use the standard configuration and documentation system described above. + +For example test plugins, see the source code for the `test plugins included with Ansible Core `_. + +.. _developing_vars_plugins: + +Vars plugins +------------ + +Vars plugins inject additional variable data into Ansible runs that did not come from an inventory source, playbook, or command line. Playbook constructs like 'host_vars' and 'group_vars' work using vars plugins. + +Vars plugins were partially implemented in Ansible 2.0 and rewritten to be fully implemented starting with Ansible 2.4. Vars plugins are supported by collections starting with Ansible 2.10. + +Older plugins used a ``run`` method as their main body/work: + +.. code-block:: python + + def run(self, name, vault_password=None): + pass # your code goes here + + +Ansible 2.0 did not pass passwords to older plugins, so vaults were unavailable. +Most of the work now happens in the ``get_vars`` method which is called from the VariableManager when needed. + +.. code-block:: python + + def get_vars(self, loader, path, entities): + pass # your code goes here + +The parameters are: + + * loader: Ansible's DataLoader. The DataLoader can read files, auto-load JSON/YAML and decrypt vaulted data, and cache read files. + * path: this is 'directory data' for every inventory source and the current play's playbook directory, so they can search for data in reference to them. ``get_vars`` will be called at least once per available path. + * entities: these are host or group names that are pertinent to the variables needed. The plugin will get called once for hosts and again for groups.
+ +This ``get_vars`` method just needs to return a dictionary structure with the variables. + +Since Ansible version 2.4, vars plugins only execute as needed when preparing to execute a task. This avoids the costly 'always execute' behavior that occurred during inventory construction in older versions of Ansible. Since Ansible version 2.10, vars plugin execution can be toggled by the user to run when preparing to execute a task or after importing an inventory source. + +Since Ansible 2.10, vars plugins can require whitelisting. Vars plugins that don't require whitelisting will run by default. To require whitelisting for your plugin set the class variable ``REQUIRES_WHITELIST``: + +.. code-block:: python + + class VarsModule(BaseVarsPlugin): + REQUIRES_WHITELIST = True + +Include the ``vars_plugin_staging`` documentation fragment to allow users to determine when vars plugins run. + +.. code-block:: python + + DOCUMENTATION = ''' + vars: custom_hostvars + version_added: "2.10" # for collections, use the collection version, not the Ansible version + short_description: Load custom host vars + description: Load custom host vars + options: + stage: + ini: + - key: stage + section: vars_custom_hostvars + env: + - name: ANSIBLE_VARS_PLUGIN_STAGE + extends_documentation_fragment: + - vars_plugin_staging + ''' + +Also since Ansible 2.10, vars plugins can reside in collections. Vars plugins in collections must require whitelisting to be functional. + +For example vars plugins, see the source code for the `vars plugins included with Ansible Core +`_. + +.. 
seealso:: + + :ref:`list_of_collections` + Browse existing collections, modules, and plugins + :ref:`developing_api` + Learn about the Python API for task execution + :ref:`developing_inventory` + Learn about how to develop dynamic inventory sources + :ref:`developing_modules_general` + Learn about how to write Ansible modules + `Mailing List `_ + The development mailing list + `irc.freenode.net `_ + #ansible IRC chat channel diff --git a/docs/docsite/rst/dev_guide/developing_program_flow_modules.rst b/docs/docsite/rst/dev_guide/developing_program_flow_modules.rst new file mode 100644 index 00000000..5300fb55 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_program_flow_modules.rst @@ -0,0 +1,880 @@ +.. _flow_modules: +.. _developing_program_flow_modules: + +*************************** +Ansible module architecture +*************************** + +If you are working on the ``ansible-base`` code, writing an Ansible module, or developing an action plugin, you may need to understand how Ansible's program flow executes. If you are just using Ansible Modules in playbooks, you can skip this section. + +.. contents:: + :local: + +.. _flow_types_of_modules: + +Types of modules +================ + +Ansible supports several different types of modules in its code base. Some of +these are for backwards compatibility and others are to enable flexibility. + +.. _flow_action_plugins: + +Action plugins +-------------- + +Action plugins look like modules to anyone writing a playbook. Usage documentation for most action plugins lives inside a module of the same name. Some action plugins do all the work, with the module providing only documentation. Some action plugins execute modules. The ``normal`` action plugin executes modules that don't have special action plugins. Action plugins always execute on the controller. + +Some action plugins do all their work on the controller. 
For +example, the :ref:`debug ` action plugin (which prints text for +the user to see) and the :ref:`assert ` action plugin (which +tests whether values in a playbook satisfy certain criteria) execute entirely on the controller. + +Most action plugins set up some values on the controller, then invoke an +actual module on the managed node that does something with these values. For example, the :ref:`template ` action plugin takes values from +the user to construct a file in a temporary location on the controller using +variables from the playbook environment. It then transfers the temporary file +to a temporary file on the remote system. After that, it invokes the +:ref:`copy module ` which operates on the remote system to move the file +into its final location, sets file permissions, and so on. + +.. _flow_new_style_modules: + +New-style modules +----------------- + +All of the modules that ship with Ansible fall into this category. While you can write modules in any language, all official modules (shipped with Ansible) use either Python or PowerShell. + +New-style modules have the arguments to the module embedded inside of them in +some manner. Old-style modules must copy a separate file over to the +managed node, which is less efficient as it requires two over-the-wire +connections instead of only one. + +.. _flow_python_modules: + +Python +^^^^^^ + +New-style Python modules use the :ref:`Ansiballz` framework for constructing +modules. These modules use imports from :code:`ansible.module_utils` to pull in +boilerplate module code, such as argument parsing, formatting of return +values as :term:`JSON`, and various file operations. + +.. note:: In Ansible, up to version 2.0.x, the official Python modules used the + :ref:`module_replacer` framework. For module authors, :ref:`Ansiballz` is + largely a superset of :ref:`module_replacer` functionality, so you usually + do not need to understand the differences between them. + +.. 
_flow_powershell_modules: + +PowerShell +^^^^^^^^^^ + +New-style PowerShell modules use the :ref:`module_replacer` framework for +constructing modules. These modules get a library of PowerShell code embedded +in them before being sent to the managed node. + +.. _flow_jsonargs_modules: + +JSONARGS modules +---------------- + +These modules are scripts that include the string +``<>`` in their body. +This string is replaced with the JSON-formatted argument string. These modules typically set a variable to that value like this: + +.. code-block:: python + + json_arguments = """<>""" + +Which is expanded as: + +.. code-block:: python + + json_arguments = """{"param1": "test's quotes", "param2": "\"To be or not to be\" - Hamlet"}""" + +.. note:: Ansible outputs a :term:`JSON` string with bare quotes. Double quotes are + used to quote string values, double quotes inside of string values are + backslash escaped, and single quotes may appear unescaped inside of + a string value. To use JSONARGS, your scripting language must have a way + to handle this type of string. The example uses Python's triple quoted + strings to do this. Other scripting languages may have a similar quote + character that won't be confused by any quotes in the JSON or it may + allow you to define your own start-of-quote and end-of-quote characters. + If the language doesn't give you any of these then you'll need to write + a :ref:`non-native JSON module ` or + :ref:`Old-style module ` instead. + +These modules typically parse the contents of ``json_arguments`` using a JSON +library and then use them as native variables throughout the code. + +.. _flow_want_json_modules: + +Non-native want JSON modules +---------------------------- + +If a module has the string ``WANT_JSON`` in it anywhere, Ansible treats +it as a non-native module that accepts a filename as its only command line +parameter. The filename is for a temporary file containing a :term:`JSON` +string containing the module's parameters. 
The module needs to open the file, +read and parse the parameters, operate on the data, and print its return data +as a JSON encoded dictionary to stdout before exiting. + +These types of modules are self-contained entities. As of Ansible 2.1, Ansible +only modifies them to change a shebang line if present. + +.. seealso:: Examples of Non-native modules written in ruby are in the `Ansible + for Rubyists `_ repository. + +.. _flow_binary_modules: + +Binary modules +-------------- + +From Ansible 2.2 onwards, modules may also be small binary programs. Ansible +doesn't perform any magic to make these portable to different systems so they +may be specific to the system on which they were compiled or require other +binary runtime dependencies. Despite these drawbacks, you may have +to compile a custom module against a specific binary +library if that's the only way to get access to certain resources. + +Binary modules take their arguments and return data to Ansible in the same +way as :ref:`want JSON modules `. + +.. seealso:: One example of a `binary module + `_ + written in go. + +.. _flow_old_style_modules: + +Old-style modules +----------------- + +Old-style modules are similar to +:ref:`want JSON modules `, except that the file that +they take contains ``key=value`` pairs for their parameters instead of +:term:`JSON`. Ansible decides that a module is old-style when it doesn't have +any of the markers that would show that it is one of the other types. + +.. _flow_how_modules_are_executed: + +How modules are executed +======================== + +When a user uses :program:`ansible` or :program:`ansible-playbook`, they +specify a task to execute. The task is usually the name of a module along +with several parameters to be passed to the module. Ansible takes these +values and processes them in various ways before they are finally executed on +the remote machine. + +.. 
_flow_executor_task_executor: + +Executor/task_executor +---------------------- + +The TaskExecutor receives the module name and parameters that were parsed from +the :term:`playbook ` (or from the command line in the case of +:command:`/usr/bin/ansible`). It uses the name to decide whether it's looking +at a module or an :ref:`Action Plugin `. If it's +a module, it loads the :ref:`Normal Action Plugin ` +and passes the name, variables, and other information about the task and play +to that Action Plugin for further processing. + +.. _flow_normal_action_plugin: + +The ``normal`` action plugin +---------------------------- + +The ``normal`` action plugin executes the module on the remote host. It is +the primary coordinator of much of the work to actually execute the module on +the managed machine. + +* It loads the appropriate connection plugin for the task, which then transfers + or executes as needed to create a connection to that host. +* It adds any internal Ansible properties to the module's parameters (for + instance, the ones that pass along ``no_log`` to the module). +* It works with other plugins (connection, shell, become, other action plugins) + to create any temporary files on the remote machine and + cleans up afterwards. +* It pushes the module and module parameters to the + remote host, although the :ref:`module_common ` + code described in the next section decides which format + those will take. +* It handles any special cases regarding modules (for instance, async + execution, or complications around Windows modules that must have the same names as Python modules, so that internal calling of modules from other Action Plugins work.) + +Much of this functionality comes from the `BaseAction` class, +which lives in :file:`plugins/action/__init__.py`. It uses the +``Connection`` and ``Shell`` objects to do its work. + +.. 
note:: + When :term:`tasks ` are run with the ``async:`` parameter, Ansible + uses the ``async`` Action Plugin instead of the ``normal`` Action Plugin + to invoke it. That program flow is currently not documented. Read the + source for information on how that works. + +.. _flow_executor_module_common: + +Executor/module_common.py +------------------------- + +Code in :file:`executor/module_common.py` assembles the module +to be shipped to the managed node. The module is first read in, then examined +to determine its type: + +* :ref:`PowerShell ` and :ref:`JSON-args modules ` are passed through :ref:`Module Replacer `. +* New-style :ref:`Python modules ` are assembled by :ref:`Ansiballz`. +* :ref:`Non-native-want-JSON `, :ref:`Binary modules `, and :ref:`Old-Style modules ` aren't touched by either of these and pass through unchanged. + +After the assembling step, one final +modification is made to all modules that have a shebang line. Ansible checks +whether the interpreter in the shebang line has a specific path configured via +an ``ansible_$X_interpreter`` inventory variable. If it does, Ansible +substitutes that path for the interpreter path given in the module. After +this, Ansible returns the complete module data and the module type to the +:ref:`Normal Action ` which continues execution of +the module. + +Assembler frameworks +-------------------- + +Ansible supports two assembler frameworks: Ansiballz and the older Module Replacer. + +.. _module_replacer: + +Module Replacer framework +^^^^^^^^^^^^^^^^^^^^^^^^^ + +The Module Replacer framework is the original framework implementing new-style +modules, and is still used for PowerShell modules. It is essentially a preprocessor (like the C Preprocessor for those +familiar with that programming language). It does straight substitutions of +specific substring patterns in the module file. There are two types of +substitutions: + +* Replacements that only happen in the module file. 
These are public + replacement strings that modules can utilize to get helpful boilerplate or + access to arguments. + + - :code:`from ansible.module_utils.MOD_LIB_NAME import *` is replaced with the + contents of the :file:`ansible/module_utils/MOD_LIB_NAME.py`. These should + only be used with :ref:`new-style Python modules `. + - :code:`#<>` is equivalent to + :code:`from ansible.module_utils.basic import *` and should also only apply + to new-style Python modules. + - :code:`# POWERSHELL_COMMON` substitutes the contents of + :file:`ansible/module_utils/powershell.ps1`. It should only be used with + :ref:`new-style Powershell modules `. + + * Replacements that are used by ``ansible.module_utils`` code. These are internal replacement patterns. They may be used internally, in the above public replacements, but shouldn't be used directly by modules. + + - :code:`"<>"` is substituted with the Ansible version. In + :ref:`new-style Python modules ` under the + :ref:`Ansiballz` framework the proper way is to instead instantiate an + `AnsibleModule` and then access the version from + :attr:`AnsibleModule.ansible_version`. + - :code:`"<>"` is substituted with + a string which is the Python ``repr`` of the :term:`JSON` encoded module + parameters. Using ``repr`` on the JSON string makes it safe to embed in + a Python file. In new-style Python modules under the Ansiballz framework + this is better accessed by instantiating an `AnsibleModule` and + then using :attr:`AnsibleModule.params`. + - :code:`<>` substitutes a string which is + a comma separated list of file systems which have a file system dependent + security context in SELinux. In new-style Python modules, if you really + need this you should instantiate an `AnsibleModule` and then use + :attr:`AnsibleModule._selinux_special_fs`. The variable has also changed + from a comma separated string of file system names to an actual python + list of filesystem names. 
+ - :code:`<>` substitutes the module + parameters as a JSON string. Care must be taken to properly quote the + string as JSON data may contain quotes. This pattern is not substituted + in new-style Python modules as they can get the module parameters another + way. + - The string :code:`syslog.LOG_USER` is replaced wherever it occurs with the + ``syslog_facility`` which was named in :file:`ansible.cfg` or any + ``ansible_syslog_facility`` inventory variable that applies to this host. In + new-style Python modules this has changed slightly. If you really need to + access it, you should instantiate an `AnsibleModule` and then use + :attr:`AnsibleModule._syslog_facility` to access it. It is no longer the + actual syslog facility and is now the name of the syslog facility. See + the :ref:`documentation on internal arguments ` + for details. + +.. _Ansiballz: + +Ansiballz framework +^^^^^^^^^^^^^^^^^^^ + +The Ansiballz framework was adopted in Ansible 2.1 and is used for all new-style Python modules. Unlike the Module Replacer, Ansiballz uses real Python imports of things in +:file:`ansible/module_utils` instead of merely preprocessing the module. It +does this by constructing a zipfile -- which includes the module file, files +in :file:`ansible/module_utils` that are imported by the module, and some +boilerplate to pass in the module's parameters. The zipfile is then Base64 +encoded and wrapped in a small Python script which decodes the Base64 encoding +and places the zipfile into a temp directory on the managed node. It then +extracts just the Ansible module script from the zip file and places that in +the temporary directory as well. Then it sets the PYTHONPATH to find Python +modules inside of the zip file and imports the Ansible module as the special name, ``__main__``. +Importing it as ``__main__`` causes Python to think that it is executing a script rather than simply +importing a module. 
This lets Ansible run both the wrapper script and the module code in a single copy of Python on the remote machine. + +.. note:: + * Ansible wraps the zipfile in the Python script for two reasons: + + * for compatibility with Python 2.6 which has a less + functional version of Python's ``-m`` command line switch. + + * so that pipelining will function properly. Pipelining needs to pipe the + Python module into the Python interpreter on the remote node. Python + understands scripts on stdin but does not understand zip files. + + * Prior to Ansible 2.7, the module was executed via a second Python interpreter instead of being + executed inside of the same process. This change was made once Python-2.4 support was dropped + to speed up module execution. + +In Ansiballz, any imports of Python modules from the +:py:mod:`ansible.module_utils` package trigger inclusion of that Python file +into the zipfile. Instances of :code:`#<>` in +the module are turned into :code:`from ansible.module_utils.basic import *` +and :file:`ansible/module-utils/basic.py` is then included in the zipfile. +Files that are included from :file:`module_utils` are themselves scanned for +imports of other Python modules from :file:`module_utils` to be included in +the zipfile as well. + +.. warning:: + At present, the Ansiballz Framework cannot determine whether an import + should be included if it is a relative import. Always use an absolute + import that has :py:mod:`ansible.module_utils` in it to allow Ansiballz to + determine that the file should be included. + + +.. _flow_passing_module_args: + +Passing args +------------ + +Arguments are passed differently by the two frameworks: + +* In :ref:`module_replacer`, module arguments are turned into a JSON-ified string and substituted into the combined module file. +* In :ref:`Ansiballz`, the JSON-ified string is part of the script which wraps the zipfile. 
Just before the wrapper script imports the Ansible module as ``__main__``, it monkey-patches the private, ``_ANSIBLE_ARGS`` variable in ``basic.py`` with the variable values. When a :class:`ansible.module_utils.basic.AnsibleModule` is instantiated, it parses this string and places the args into :attr:`AnsibleModule.params` where it can be accessed by the module's other code. + +.. warning:: + If you are writing modules, remember that the way we pass arguments is an internal implementation detail: it has changed in the past and will change again as soon as changes to the common module_utils + code allow Ansible modules to forgo using :class:`ansible.module_utils.basic.AnsibleModule`. Do not rely on the internal global ``_ANSIBLE_ARGS`` variable. + + Very dynamic custom modules which need to parse arguments before they + instantiate an ``AnsibleModule`` may use ``_load_params`` to retrieve those parameters. + Although ``_load_params`` may change in breaking ways if necessary to support + changes in the code, it is likely to be more stable than either the way we pass parameters or the internal global variable. + +.. note:: + Prior to Ansible 2.7, the Ansible module was invoked in a second Python interpreter and the + arguments were then passed to the script over the script's stdin. + + +.. _flow_internal_arguments: + +Internal arguments +------------------ + +Both :ref:`module_replacer` and :ref:`Ansiballz` send additional arguments to +the module beyond those which the user specified in the playbook. These +additional arguments are internal parameters that help implement global +Ansible features. Modules often do not need to know about these explicitly as +the features are implemented in :py:mod:`ansible.module_utils.basic` but certain +features need support from the module so it's good to know about them. + +The internal arguments listed here are global. 
If you need to add a local internal argument to a custom module, create an action plugin for that specific module - see ``_original_basename`` in the `copy action plugin `_ for an example. + +_ansible_no_log +^^^^^^^^^^^^^^^ + +Boolean. Set to True whenever a parameter in a task or play specifies ``no_log``. Any module that calls :py:meth:`AnsibleModule.log` handles this automatically. If a module implements its own logging then +it needs to check this value. To access in a module, instantiate an +``AnsibleModule`` and then check the value of :attr:`AnsibleModule.no_log`. + +.. note:: + ``no_log`` specified in a module's argument_spec is handled by a different mechanism. + +_ansible_debug +^^^^^^^^^^^^^^^ + +Boolean. Turns more verbose logging on or off and turns on logging of +external commands that the module executes. If a module uses +:py:meth:`AnsibleModule.debug` rather than :py:meth:`AnsibleModule.log` then +the messages are only logged if ``_ansible_debug`` is set to ``True``. +To set, add ``debug: True`` to :file:`ansible.cfg` or set the environment +variable :envvar:`ANSIBLE_DEBUG`. To access in a module, instantiate an +``AnsibleModule`` and access :attr:`AnsibleModule._debug`. + +_ansible_diff +^^^^^^^^^^^^^^^ + +Boolean. If a module supports it, tells the module to show a unified diff of +changes to be made to templated files. To set, pass the ``--diff`` command line +option. To access in a module, instantiate an `AnsibleModule` and access +:attr:`AnsibleModule._diff`. + +_ansible_verbosity +^^^^^^^^^^^^^^^^^^ + +Unused. This value could be used for finer grained control over logging. + +_ansible_selinux_special_fs +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +List. Names of filesystems which should have a special SELinux +context. They are used by the `AnsibleModule` methods which operate on +files (changing attributes, moving, and copying). 
To set, add a comma separated string of filesystem names in :file:`ansible.cfg`:: + + # ansible.cfg + [selinux] + special_context_filesystems=nfs,vboxsf,fuse,ramfs,vfat + +Most modules can use the built-in ``AnsibleModule`` methods to manipulate +files. To access in a module that needs to know about these special context filesystems, instantiate an ``AnsibleModule`` and examine the list in +:attr:`AnsibleModule._selinux_special_fs`. + +This replaces :attr:`ansible.module_utils.basic.SELINUX_SPECIAL_FS` from +:ref:`module_replacer`. In module replacer it was a comma separated string of +filesystem names. Under Ansiballz it's an actual list. + +.. versionadded:: 2.1 + +_ansible_syslog_facility +^^^^^^^^^^^^^^^^^^^^^^^^ + +This parameter controls which syslog facility Ansible module logs to. To set, change the ``syslog_facility`` value in :file:`ansible.cfg`. Most +modules should just use :meth:`AnsibleModule.log` which will then make use of +this. If a module has to use this on its own, it should instantiate an +`AnsibleModule` and then retrieve the name of the syslog facility from +:attr:`AnsibleModule._syslog_facility`. The Ansiballz code is less hacky than the old :ref:`module_replacer` code: + +.. code-block:: python + + # Old module_replacer way + import syslog + syslog.openlog(NAME, 0, syslog.LOG_USER) + + # New Ansiballz way + import syslog + facility_name = module._syslog_facility + facility = getattr(syslog, facility_name, syslog.LOG_USER) + syslog.openlog(NAME, 0, facility) + +.. versionadded:: 2.1 + +_ansible_version +^^^^^^^^^^^^^^^^ + +This parameter passes the version of Ansible that runs the module. To access +it, a module should instantiate an `AnsibleModule` and then retrieve it +from :attr:`AnsibleModule.ansible_version`. This replaces +:attr:`ansible.module_utils.basic.ANSIBLE_VERSION` from +:ref:`module_replacer`. + +.. versionadded:: 2.1 + + +.. 
_flow_module_return_values: + +Module return values & Unsafe strings +------------------------------------- + +At the end of a module's execution, it formats the data that it wants to return as a JSON string and prints the string to its stdout. The normal action plugin receives the JSON string, parses it into a Python dictionary, and returns it to the executor. + +If Ansible templated every string return value, it would be vulnerable to an attack from users with access to managed nodes. If an unscrupulous user disguised malicious code as Ansible return value strings, and if those strings were then templated on the controller, Ansible could execute arbitrary code. To prevent this scenario, Ansible marks all strings inside returned data as ``Unsafe``, emitting any Jinja2 templates in the strings verbatim, not expanded by Jinja2. + +Strings returned by invoking a module through ``ActionPlugin._execute_module()`` are automatically marked as ``Unsafe`` by the normal action plugin. If another action plugin retrieves information from a module through some other means, it must mark its return data as ``Unsafe`` on its own. + +In case a poorly-coded action plugin fails to mark its results as "Unsafe," Ansible audits the results again when they are returned to the executor, +marking all strings as ``Unsafe``. The normal action plugin protects itself and any other code that it calls with the result data as a parameter. The check inside the executor protects the output of all other action plugins, ensuring that subsequent tasks run by Ansible will not template anything from those results either. + +.. _flow_special_considerations: + +Special considerations +---------------------- + +.. 
_flow_pipelining: + +Pipelining +^^^^^^^^^^ + +Ansible can transfer a module to a remote machine in one of two ways: + +* it can write out the module to a temporary file on the remote host and then + use a second connection to the remote host to execute it with the + interpreter that the module needs +* or it can use what's known as pipelining to execute the module by piping it + into the remote interpreter's stdin. + +Pipelining only works with modules written in Python at this time because +Ansible only knows that Python supports this mode of operation. Supporting +pipelining means that whatever format the module payload takes before being +sent over the wire must be executable by Python via stdin. + +.. _flow_args_over_stdin: + +Why pass args over stdin? +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Passing arguments via stdin was chosen for the following reasons: + +* When combined with :ref:`ANSIBLE_PIPELINING`, this keeps the module's arguments from + temporarily being saved onto disk on the remote machine. This makes it + harder (but not impossible) for a malicious user on the remote machine to + steal any sensitive information that may be present in the arguments. +* Command line arguments would be insecure as most systems allow unprivileged + users to read the full commandline of a process. +* Environment variables are usually more secure than the commandline but some + systems limit the total size of the environment. This could lead to + truncation of the parameters if we hit that limit. + + +.. _flow_ansiblemodule: + +AnsibleModule +------------- + +.. _argument_spec: + +Argument spec +^^^^^^^^^^^^^ + +The ``argument_spec`` provided to ``AnsibleModule`` defines the supported arguments for a module, as well as their type, defaults and more. + +Example ``argument_spec``: + +.. 
code-block:: python + + module = AnsibleModule(argument_spec=dict( + top_level=dict( + type='dict', + options=dict( + second_level=dict( + default=True, + type='bool', + ) + ) + ) + )) + +This section will discuss the behavioral attributes for arguments: + +:type: + + ``type`` allows you to define the type of the value accepted for the argument. The default value for ``type`` is ``str``. Possible values are: + + * str + * list + * dict + * bool + * int + * float + * path + * raw + * jsonarg + * json + * bytes + * bits + + The ``raw`` type performs no type validation or type casting, and maintains the type of the passed value. + +:elements: + + ``elements`` works in combination with ``type`` when ``type='list'``. ``elements`` can then be defined as ``elements='int'`` or any other type, indicating that each element of the specified list should be of that type. + +:default: + + The ``default`` option sets a default value for the argument for the scenario when the argument is not provided to the module. When not specified, the default value is ``None``. + +:fallback: + + ``fallback`` accepts a ``tuple`` where the first argument is a callable (function) that will be used to perform the lookup, based on the second argument. The second argument is a list of values to be accepted by the callable. + + The most common callable used is ``env_fallback`` which will allow an argument to optionally use an environment variable when the argument is not supplied. + + Example: + + .. code-block:: python + + username=dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])) + +:choices: + + ``choices`` accepts a list of choices that the argument will accept. The types of ``choices`` should match the ``type``. + +:required: + + ``required`` accepts a boolean, either ``True`` or ``False`` that indicates that the argument is required. When not specified, ``required`` defaults to ``False``. This should not be used in combination with ``default``. 
+ +:no_log: + + ``no_log`` accepts a boolean, either ``True`` or ``False``, that indicates explicitly whether or not the argument value should be masked in logs and output. + + .. note:: + In the absence of ``no_log``, if the parameter name appears to indicate that the argument value is a password or passphrase (such as "admin_password"), a warning will be shown and the value will be masked in logs but **not** output. To disable the warning and masking for parameters that do not contain sensitive information, set ``no_log`` to ``False``. + +:aliases: + + ``aliases`` accepts a list of alternative argument names for the argument, such as the case where the argument is ``name`` but the module accepts ``aliases=['pkg']`` to allow ``pkg`` to be used interchangeably with ``name`` + +:options: + + ``options`` implements the ability to create a sub-argument_spec, where the sub options of the top level argument are also validated using the attributes discussed in this section. The example at the top of this section demonstrates use of ``options``. ``type`` or ``elements`` should be ``dict`` in this case. + +:apply_defaults: + + ``apply_defaults`` works alongside ``options`` and allows the ``default`` of the sub-options to be applied even when the top-level argument is not supplied. + + In the example of the ``argument_spec`` at the top of this section, it would allow ``module.params['top_level']['second_level']`` to be defined, even if the user does not provide ``top_level`` when calling the module. + +:removed_in_version: + + ``removed_in_version`` indicates which version of ansible-base or a collection a deprecated argument will be removed in. Mutually exclusive with ``removed_at_date``, and must be used with ``removed_from_collection``. + + Example: + + .. 
code-block:: python + + 'option': { + 'type': 'str', + 'removed_in_version': '2.0.0', + 'collection_name': 'testns.testcol', + }, + +:removed_at_date: + + ``removed_at_date`` indicates that a deprecated argument will be removed in a minor ansible-base release or major collection release after this date. Mutually exclusive with ``removed_in_version``, and must be used with ``removed_from_collection``. + + Example: + + .. code-block:: python + + 'option': { + 'type': 'str', + 'removed_at_date': '2020-12-31', + 'collection_name': 'testns.testcol', + }, + +:removed_from_collection: + + Specifies which collection (or ansible-base) deprecates this deprecated argument. Specify ``ansible.builtin`` for ansible-base, or the collection's name (format ``foo.bar``). Must be used with ``removed_in_version`` or ``removed_at_date``. + +:deprecated_aliases: + + Deprecates aliases of this argument. Must contain a list or tuple of dictionaries having some of the following keys: + + :name: + + The name of the alias to deprecate. (Required.) + + :version: + + The version of ansible-base or the collection this alias will be removed in. Either ``version`` or ``date`` must be specified. + + :date: + + The date after which a minor release of ansible-base or a major collection release will no longer contain this alias. Either ``version`` or ``date`` must be specified. + + :collection_name: + + Specifies which collection (or ansible-base) deprecates this deprecated alias. Specify ``ansible.builtin`` for ansible-base, or the collection's name (format ``foo.bar``). Must be used with ``version`` or ``date``. + + Examples: + + .. 
code-block:: python + + 'option': { + 'type': 'str', + 'aliases': ['foo', 'bar'], + 'deprecated_aliases': [ + { + 'name': 'foo', + 'version': '2.0.0', + 'collection_name': 'testns.testcol', + }, + { + 'name': 'foo', + 'date': '2020-12-31', + 'collection_name': 'testns.testcol', + }, + ], + }, + + +:mutually_exclusive: + + If ``options`` is specified, ``mutually_exclusive`` refers to the sub-options described in ``options`` and behaves as in :ref:`argument_spec_dependencies`. + +:required_together: + + If ``options`` is specified, ``required_together`` refers to the sub-options described in ``options`` and behaves as in :ref:`argument_spec_dependencies`. + +:required_one_of: + + If ``options`` is specified, ``required_one_of`` refers to the sub-options described in ``options`` and behaves as in :ref:`argument_spec_dependencies`. + +:required_if: + + If ``options`` is specified, ``required_if`` refers to the sub-options described in ``options`` and behaves as in :ref:`argument_spec_dependencies`. + +:required_by: + + If ``options`` is specified, ``required_by`` refers to the sub-options described in ``options`` and behaves as in :ref:`argument_spec_dependencies`. + + +.. _argument_spec_dependencies: + +Dependencies between module options +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The following are optional arguments for ``AnsibleModule()``: + +.. code-block:: python + + module = AnsibleModule( + argument_spec, + mutually_exclusive=[ + ('path', 'content'), + ], + required_one_of=[ + ('path', 'content'), + ], + ) + +:mutually_exclusive: + + Must be a sequence (list or tuple) of sequences of strings. Every sequence of strings is a list of option names which are mutually exclusive. If more than one option of a list is specified together, Ansible will fail the module with an error. + + Example: + + .. 
code-block:: python + + mutually_exclusive=[ + ('path', 'content'), + ('repository_url', 'repository_filename'), + ], + + In this example, the options ``path`` and ``content`` must not be specified at the same time. Also the options ``repository_url`` and ``repository_filename`` must not be specified at the same time. But specifying ``path`` and ``repository_url`` is accepted. + + To ensure that precisely one of two (or more) options is specified, combine ``mutually_exclusive`` with ``required_one_of``. + +:required_together: + + Must be a sequence (list or tuple) of sequences of strings. Every sequence of strings is a list of option names which must be specified together. If at least one of these options are specified, the other ones from the same sequence must all be present. + + Example: + + .. code-block:: python + + required_together=[ + ('file_path', 'file_hash'), + ], + + In this example, if one of the options ``file_path`` or ``file_hash`` is specified, Ansible will fail the module with an error if the other one is not specified. + +:required_one_of: + + Must be a sequence (list or tuple) of sequences of strings. Every sequence of strings is a list of option names from which at least one must be specified. If none of these options are specified, Ansible will fail module execution. + + Example: + + .. code-block:: python + + required_one_of=[ + ('path', 'content'), + ], + + In this example, at least one of ``path`` and ``content`` must be specified. If none are specified, execution will fail. Specifying both is explicitly allowed; to prevent this, combine ``required_one_of`` with ``mutually_exclusive``. + +:required_if: + + Must be a sequence of sequences. Every inner sequence describes one conditional dependency. Every sequence must have three or four values. The first two values are the option's name and the option's value which describes the condition. 
The further elements of the sequence are only needed if the option of that name has precisely this value. + + If you want all options in a list of option names to be specified when the condition is met, use one of the following forms: + + .. code-block:: python + + ('option_name', option_value, ('option_a', 'option_b', ...)), + ('option_name', option_value, ('option_a', 'option_b', ...), False), + + If you want at least one option of a list of option names to be specified when the condition is met, use the following form: + + .. code-block:: python + + ('option_name', option_value, ('option_a', 'option_b', ...), True), + + Example: + + .. code-block:: python + + required_if=[ + ('state', 'present', ('path', 'content'), True), + ('force', True, ('force_reason', 'force_code')), + ], + + In this example, if the user specifies ``state=present``, at least one of the options ``path`` and ``content`` must be supplied (or both). To make sure that precisely one can be specified, combine ``required_if`` with ``mutually_exclusive``. + + On the other hand, if ``force`` (a boolean parameter) is set to ``true``, ``yes`` etc., both ``force_reason`` and ``force_code`` must be specified. + +:required_by: + + Must be a dictionary mapping option names to sequences of option names. If the option name in a dictionary key is specified, the option names it maps to must all also be specified. Note that instead of a sequence of option names, you can also specify one single option name. + + Example: + + .. code-block:: python + + required_by={ + 'force': 'force_reason', + 'path': ('mode', 'owner', 'group'), + }, + + In the example, if ``force`` is specified, ``force_reason`` must also be specified. Also, if ``path`` is specified, then the three options ``mode``, ``owner`` and ``group`` also must be specified.
+ +Declaring check mode support +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To declare that a module supports check mode, supply ``supports_check_mode=True`` to the ``AnsibleModule()`` call: + +.. code-block:: python + + module = AnsibleModule(argument_spec, supports_check_mode=True) + +The module can determine whether it is called in check mode by checking the boolean value ``module.check_mode``. If it evaluates to ``True``, the module must take care not to do any modification. + +If ``supports_check_mode=False`` is specified, which is the default value, the module will exit in check mode with ``skipped=True`` and message ``remote module () does not support check mode``. + +Adding file options +^^^^^^^^^^^^^^^^^^^ + +To declare that a module should add support for all common file options, supply ``add_file_common_args=True`` to the ``AnsibleModule()`` call: + +.. code-block:: python + + module = AnsibleModule(argument_spec, add_file_common_args=True) + +You can find `a list of all file options here `_. It is recommended that you make your ``DOCUMENTATION`` extend the doc fragment ``ansible.builtin.files`` (see :ref:`module_docs_fragments`) in this case, to make sure that all these fields are correctly documented. + +The helper functions ``module.load_file_common_arguments()`` and ``module.set_fs_attributes_if_different()`` can be used to handle these arguments for you: + +.. 
code-block:: python + + argument_spec = { + 'path': { + 'type': 'str', + 'required': True, + }, + } + + module = AnsibleModule(argument_spec, add_file_common_args=True) + changed = False + + # TODO do something with module.params['path'], like update its contents + + # Ensure that module.params['path'] satisfies the file options supplied by the user + file_args = module.load_file_common_arguments(module.params) + changed = module.set_fs_attributes_if_different(file_args, changed) + + module.exit_json(changed=changed) diff --git a/docs/docsite/rst/dev_guide/developing_python_3.rst b/docs/docsite/rst/dev_guide/developing_python_3.rst new file mode 100644 index 00000000..3713e412 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_python_3.rst @@ -0,0 +1,404 @@ +.. _developing_python_3: + +******************** +Ansible and Python 3 +******************** + +The ``ansible-base`` code runs on both Python 2 and Python 3 because we want Ansible to be able to manage a wide +variety of machines. Contributors to ansible-base and to Ansible Collections should be aware of the tips in this document so that they can write code that will run on the same versions of Python as the rest of Ansible. + +.. contents:: + :local: + +To ensure that your code runs on Python 3 as well as on Python 2, learn the tips and tricks and idioms +described here. Most of these considerations apply to all three types of Ansible code: + +1. controller-side code - code that runs on the machine where you invoke :command:`/usr/bin/ansible` +2. modules - the code which Ansible transmits to and invokes on the managed machine. +3. shared ``module_utils`` code - the common code that's used by modules to perform tasks and sometimes used by controller-side code as well + +However, the three types of code do not use the same string strategy. If you're developing a module or some ``module_utils`` code, be sure to read the section on string strategy carefully.
+ +Minimum version of Python 3.x and Python 2.x +============================================ + +On the controller we support Python 3.5 or greater and Python 2.7 or greater. Module-side, we +support Python 3.5 or greater and Python 2.6 or greater. + +Python 3.5 was chosen as a minimum because it is the earliest Python 3 version adopted as the +default Python by a Long Term Support (LTS) Linux distribution (in this case, Ubuntu-16.04). +Previous LTS Linux distributions shipped with a Python 2 version which users can rely upon instead +of the Python 3 version. + +For Python 2, the default is for modules to run on at least Python 2.6. This allows +users with older distributions that are stuck on Python 2.6 to manage their +machines. Modules are allowed to drop support for Python 2.6 when one of +their dependent libraries requires a higher version of Python. This is not an +invitation to add unnecessary dependent libraries in order to force your +module to be usable only with a newer version of Python; instead it is an +acknowledgment that some libraries (for instance, boto3 and docker-py) will +only function with a newer version of Python. + +.. note:: Python 2.4 Module-side Support: + + Support for Python 2.4 and Python 2.5 was dropped in Ansible-2.4. RHEL-5 + (and its rebuilds like CentOS-5) were supported until April of 2017. + Ansible-2.3 was released in April of 2017 and was the last Ansible release + to support Python 2.4 on the module-side. + +Developing Ansible code that supports Python 2 and Python 3 +=========================================================== + +The best place to start learning about writing code that supports both Python 2 and Python 3 +is `Lennart Regebro's book: Porting to Python 3 `_. +The book describes several strategies for porting to Python 3. 
The one we're +using is `to support Python 2 and Python 3 from a single code base +`_ + +Understanding strings in Python 2 and Python 3 +---------------------------------------------- + +Python 2 and Python 3 handle strings differently, so when you write code that supports Python 3 +you must decide what string model to use. Strings can be an array of bytes (like in C) or +they can be an array of text. Text is what we think of as letters, digits, +numbers, other printable symbols, and a small number of unprintable "symbols" +(control codes). + +In Python 2, the two types for these (:class:`str ` for bytes and +:func:`unicode ` for text) are often used interchangeably. When dealing only +with ASCII characters, the strings can be combined, compared, and converted +from one type to another automatically. When non-ASCII characters are +introduced, Python 2 starts throwing exceptions due to not knowing what encoding +the non-ASCII characters should be in. + +Python 3 changes this behavior by making the separation between bytes (:class:`bytes `) +and text (:class:`str `) more strict. Python 3 will throw an exception when +trying to combine and compare the two types. The programmer has to explicitly +convert from one type to the other to mix values from each. + +In Python 3 it's immediately apparent to the programmer when code is +mixing the byte and text types inappropriately, whereas in Python 2, code that mixes those types +may work until a user causes an exception by entering non-ASCII input. +Python 3 forces programmers to proactively define a strategy for +working with strings in their program so that they don't mix text and byte strings unintentionally. + +Ansible uses different strategies for working with strings in controller-side code, in +:ref:`modules `, and in :ref:`module_utils ` code. + +..
_controller_string_strategy: + +Controller string strategy: the Unicode Sandwich +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In controller-side code we use a strategy known as the Unicode Sandwich (named +after Python 2's :func:`unicode ` text type). For Unicode Sandwich we know that +at the border of our code and the outside world (for example, file and network IO, +environment variables, and some library calls) we are going to receive bytes. +We need to transform these bytes into text and use that throughout the +internal portions of our code. When we have to send those strings back out to +the outside world we first convert the text back into bytes. +To visualize this, imagine a 'sandwich' consisting of a top and bottom layer +of bytes, a layer of conversion between, and all text type in the center. + +Unicode Sandwich common borders: places to convert bytes to text in controller code +----------------------------------------------------------------------------------- + +This is a partial list of places where we have to convert to and from bytes +when using the Unicode Sandwich string strategy. It's not exhaustive but +it gives you an idea of where to watch for problems. + +Reading and writing to files +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In Python 2, reading from files yields bytes. In Python 3, it can yield text. +To make code that's portable to both we don't make use of Python 3's ability +to yield text but instead do the conversion explicitly ourselves. For example: + +.. code-block:: python + + from ansible.module_utils._text import to_text + + with open('filename-with-utf8-data.txt', 'rb') as my_file: + b_data = my_file.read() + try: + data = to_text(b_data, errors='surrogate_or_strict') + except UnicodeError: + # Handle the exception gracefully -- usually by displaying a good + # user-centric error message that can be traced back to this piece + # of code. + pass + +.. note:: Much of Ansible assumes that all encoded text is UTF-8. 
At some + point, if there is demand for other encodings we may change that, but for + now it is safe to assume that bytes are UTF-8. + +Writing to files is the opposite process: + +.. code-block:: python + + from ansible.module_utils._text import to_bytes + + with open('filename.txt', 'wb') as my_file: + my_file.write(to_bytes(some_text_string)) + +Note that we don't have to catch :exc:`UnicodeError` here because we're +transforming to UTF-8 and all text strings in Python can be transformed back +to UTF-8. + +Filesystem interaction +^^^^^^^^^^^^^^^^^^^^^^ + +Dealing with filenames often involves dropping back to bytes because on UNIX-like +systems filenames are bytes. On Python 2, if we pass a text string to these +functions, the text string will be converted to a byte string inside of the +function and a traceback will occur if non-ASCII characters are present. In +Python 3, a traceback will only occur if the text string can't be decoded in +the current locale, but it's still good to be explicit and have code which +works on both versions: + +.. code-block:: python + + import os.path + + from ansible.module_utils._text import to_bytes + + filename = u'/var/tmp/くらとみ.txt' + f = open(to_bytes(filename), 'wb') + mtime = os.path.getmtime(to_bytes(filename)) + b_filename = os.path.expandvars(to_bytes(filename)) + if os.path.exists(to_bytes(filename)): + pass + +When you are only manipulating a filename as a string without talking to the +filesystem (or a C library which talks to the filesystem) you can often get +away without converting to bytes: + +.. code-block:: python + + import os.path + + os.path.join(u'/var/tmp/café', u'くらとみ') + os.path.split(u'/var/tmp/café/くらとみ') + +On the other hand, if the code needs to manipulate the filename and also talk +to the filesystem, it can be more convenient to transform to bytes right away +and manipulate in bytes. + +.. warning:: Make sure all variables passed to a function are the same type. 
+ If you're working with something like :func:`python3:os.path.join` which takes + multiple strings and uses them in combination, you need to make sure that + all the types are the same (either all bytes or all text). Mixing + bytes and text will cause tracebacks. + +Interacting with other programs +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Interacting with other programs goes through the operating system and +C libraries and operates on things that the UNIX kernel defines. These +interfaces are all byte-oriented so the Python interface is byte oriented as +well. On both Python 2 and Python 3, byte strings should be given to Python's +subprocess library and byte strings should be expected back from it. + +One of the main places in Ansible's controller code that we interact with +other programs is the connection plugins' ``exec_command`` methods. These +methods transform any text strings they receive in the command (and arguments +to the command) to execute into bytes and return stdout and stderr as byte strings. +Higher level functions (like action plugins' ``_low_level_execute_command``) +transform the output into text strings. + +.. _module_string_strategy: + +Module string strategy: Native String +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In modules we use a strategy known as Native Strings. This makes things +easier on the community members who maintain so many of Ansible's +modules, by not breaking backwards compatibility by +mandating that all strings inside of modules are text and converting between +text and bytes at the borders. + +Native strings refer to the type that Python uses when you specify a bare +string literal: + +.. code-block:: python + + "This is a native string" + +In Python 2, these are byte strings. In Python 3 these are text strings. Modules should be +coded to expect bytes on Python 2 and text on Python 3. + +..
_module_utils_string_strategy: + +Module_utils string strategy: hybrid +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In ``module_utils`` code we use a hybrid string strategy. Although Ansible's +``module_utils`` code is largely like module code, some pieces of it are +used by the controller as well. So it needs to be compatible with modules +and with the controller's assumptions, particularly the string strategy. +The module_utils code attempts to accept native strings as input +to its functions and emit native strings as their output. + +In ``module_utils`` code: + +* Functions **must** accept string parameters as either text strings or byte strings. +* Functions may return either the same type of string as they were given or the native string type for the Python version they are run on. +* Functions that return strings **must** document whether they return strings of the same type as they were given or native strings. + +Module-utils functions are therefore often very defensive in nature. +They convert their string parameters into text (using ``ansible.module_utils._text.to_text``) +at the beginning of the function, do their work, and then convert +the return values into the native string type (using ``ansible.module_utils._text.to_native``) +or back to the string type that their parameters received. + +Tips, tricks, and idioms for Python 2/Python 3 compatibility +------------------------------------------------------------ + +Use forward-compatibility boilerplate +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Use the following boilerplate code at the top of all python files +to make certain constructs act the same way on Python 2 and Python 3: + +.. code-block:: python + + # Make coding more python3-ish + from __future__ import (absolute_import, division, print_function) + __metaclass__ = type + +``__metaclass__ = type`` makes all classes defined in the file into new-style +classes without explicitly inheriting from :class:`object `. 
+ +The ``__future__`` imports do the following: + +:absolute_import: Makes imports look in :data:`sys.path ` for the modules being + imported, skipping the directory in which the module doing the importing + lives. If the code wants to use the directory in which the module doing + the importing lives, there's a new dot notation to do so. +:division: Makes division of integers always return a float. If you need to + find the quotient use ``x // y`` instead of ``x / y``. +:print_function: Changes :func:`print ` from a keyword into a function. + +.. seealso:: + * `PEP 0328: Absolute Imports `_ + * `PEP 0238: Division `_ + * `PEP 3105: Print function `_ + +Prefix byte strings with ``b_`` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Since mixing text and bytes types leads to tracebacks we want to be clear +about what variables hold text and what variables hold bytes. We do this by +prefixing any variable holding bytes with ``b_``. For instance: + +.. code-block:: python + + filename = u'/var/tmp/café.txt' + b_filename = to_bytes(filename) + with open(b_filename) as f: + data = f.read() + +We do not prefix the text strings instead because we only operate +on byte strings at the borders, so there are fewer variables that need bytes +than text. + +Import Ansible's bundled Python ``six`` library +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The third-party Python `six `_ library exists +to help projects create code that runs on both Python 2 and Python 3. Ansible +includes a version of the library in module_utils so that other modules can use it +without requiring that it is installed on the remote system. To make use of +it, import it like this: + +.. code-block:: python + + from ansible.module_utils import six + +.. note:: Ansible can also use a system copy of six + + Ansible will use a system copy of six if the system copy is a later + version than the one Ansible bundles.
+ +Handle exceptions with ``as`` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In order for code to function on Python 2.6+ and Python 3, use the +new exception-catching syntax which uses the ``as`` keyword: + +.. code-block:: python + + try: + a = 2/0 + except ValueError as e: + module.fail_json(msg="Tried to divide by zero: %s" % e) + +Do **not** use the following syntax as it will fail on every version of Python 3: + +.. This code block won't highlight because python2 isn't recognized. This is necessary to pass tests under python 3. +.. code-block:: none + + try: + a = 2/0 + except ValueError, e: + module.fail_json(msg="Tried to divide by zero: %s" % e) + +Update octal numbers +^^^^^^^^^^^^^^^^^^^^ + +In Python 2.x, octal literals could be specified as ``0755``. In Python 3, +octals must be specified as ``0o755``. + +String formatting for controller code +------------------------------------- + +Use ``str.format()`` for Python 2.6 compatibility +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Starting in Python 2.6, strings gained a method called ``format()`` to put +strings together. However, one commonly used feature of ``format()`` wasn't +added until Python 2.7, so you need to remember not to use it in Ansible code: + +.. code-block:: python + + # Does not work in Python 2.6! + new_string = "Dear {}, Welcome to {}".format(username, location) + + # Use this instead + new_string = "Dear {0}, Welcome to {1}".format(username, location) + +Both of the format strings above map positional arguments of the ``format()`` +method into the string. However, the first version doesn't work in +Python 2.6. Always remember to put numbers into the placeholders so the code +is compatible with Python 2.6. + +.. seealso:: + Python documentation on `format strings `_ + +Use percent format with byte strings +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In Python 3.x, byte strings do not have a ``format()`` method. However, it +does have support for the older, percent-formatting. + +.. 
code-block:: python + + b_command_line = b'ansible-playbook --become-user %s -K %s' % (user, playbook_file) + +.. note:: Percent formatting added in Python 3.5 + + Percent formatting of byte strings was added back into Python 3 in 3.5. + This isn't a problem for us because Python 3.5 is our minimum version. + However, if you happen to be testing Ansible code with Python 3.4 or + earlier, you will find that the byte string formatting here won't work. + Upgrade to Python 3.5 to test. + +.. seealso:: + Python documentation on `percent formatting `_ + +.. _testing_modules_python_3: + +Testing modules on Python 3 +=================================== + +Ansible modules are slightly harder to code to support Python 3 than normal code from other projects. A lot of mocking has to go into unit testing an Ansible module, so it's harder to test that your changes have fixed everything or to make sure that later commits haven't regressed the Python 3 support. Review our :ref:`testing ` pages for more information. diff --git a/docs/docsite/rst/dev_guide/developing_rebasing.rst b/docs/docsite/rst/dev_guide/developing_rebasing.rst new file mode 100644 index 00000000..81936be1 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_rebasing.rst @@ -0,0 +1,83 @@ +.. _rebase_guide: + +*********************** +Rebasing a pull request +*********************** + +You may find that your pull request (PR) is out-of-date and needs to be rebased. This can happen for several reasons: + +- Files modified in your PR are in conflict with changes which have already been merged. +- Your PR is old enough that significant changes to automated test infrastructure have occurred. + +Rebasing the branch used to create your PR will resolve both of these issues. + +Configuring your remotes +======================== + +Before you can rebase your PR, you need to make sure you have the proper remotes configured. These instructions apply to any repository on GitHub, including collections repositories.
On other platforms (bitbucket, gitlab), the same principles and commands apply but the syntax may be different. We use the ansible/ansible repository here as an example. In other repositories, the branch names may be different. Assuming you cloned your fork in the usual fashion, the ``origin`` remote will point to your fork:: + + $ git remote -v + origin git@github.com:YOUR_GITHUB_USERNAME/ansible.git (fetch) + origin git@github.com:YOUR_GITHUB_USERNAME/ansible.git (push) + +However, you also need to add a remote which points to the upstream repository:: + + $ git remote add upstream https://github.com/ansible/ansible.git + +Which should leave you with the following remotes:: + + $ git remote -v + origin git@github.com:YOUR_GITHUB_USERNAME/ansible.git (fetch) + origin git@github.com:YOUR_GITHUB_USERNAME/ansible.git (push) + upstream https://github.com/ansible/ansible.git (fetch) + upstream https://github.com/ansible/ansible.git (push) + +Checking the status of your branch should show your fork is up-to-date with the ``origin`` remote:: + + $ git status + On branch YOUR_BRANCH + Your branch is up-to-date with 'origin/YOUR_BRANCH'. + nothing to commit, working tree clean + +Rebasing your branch +==================== + +Once you have an ``upstream`` remote configured, you can rebase the branch for your PR:: + + $ git pull --rebase upstream devel + +This will replay the changes in your branch on top of the changes made in the upstream ``devel`` branch. +If there are merge conflicts, you will be prompted to resolve those before you can continue. + +After you rebase, the status of your branch changes:: + + $ git status + On branch YOUR_BRANCH + Your branch and 'origin/YOUR_BRANCH' have diverged, + and have 4 and 1 different commits each, respectively. + (use "git pull" to merge the remote branch into yours) + nothing to commit, working tree clean + +Don't worry, this is normal after a rebase. You should ignore the ``git status`` instructions to use ``git pull``. 
We'll cover what to do next in the following section. + +Updating your pull request +========================== + +Now that you've rebased your branch, you need to push your changes to GitHub to update your PR. + +Since rebasing re-writes git history, you will need to use a force push:: + + $ git push --force-with-lease + +Your PR on GitHub has now been updated. This will automatically trigger testing of your changes. +You should check in on the status of your PR after tests have completed to see if further changes are required. + +Getting help rebasing +===================== + +For help with rebasing your PR, or other development related questions, join us on our #ansible-devel IRC chat channel on `freenode.net `_. + +.. seealso:: + + :ref:`community_development_process` + Information on roadmaps, opening PRs, Ansibullbot, and more diff --git a/docs/docsite/rst/dev_guide/index.rst b/docs/docsite/rst/dev_guide/index.rst new file mode 100644 index 00000000..fb5b7f4a --- /dev/null +++ b/docs/docsite/rst/dev_guide/index.rst @@ -0,0 +1,92 @@ +.. _developer_guide: + +*************** +Developer Guide +*************** + +Welcome to the Ansible Developer Guide! + +**Who should use this guide?** + +If you want to extend Ansible by using a custom module or plugin locally, creating a module or plugin, adding functionality to an existing module, or expanding test coverage, this guide is for you. We've included detailed information for developers on how to test and document modules, as well as the prerequisites for getting your module or plugin accepted into the main Ansible repository. + +Find the task that best describes what you want to do: + +* I'm looking for a way to address a use case: + + * I want to :ref:`add a custom plugin or module locally `. + * I want to figure out if :ref:`developing a module is the right approach ` for my use case. + * I want to :ref:`develop a collection `. + * I want to :ref:`contribute to an Ansible-maintained collection `. 
+ * I want to :ref:`contribute to a community-maintained collection `. + * I want to :ref:`migrate a role to a collection `. + +* I've read the info above, and I'm sure I want to develop a module: + + * What do I need to know before I start coding? + * I want to :ref:`set up my Python development environment `. + * I want to :ref:`get started writing a module `. + * I want to write a specific kind of module: + * a :ref:`network module ` + * a :ref:`Windows module `. + * an :ref:`Amazon module `. + * an :ref:`OpenStack module `. + * an :ref:`oVirt/RHV module `. + * a :ref:`VMware module `. + * I want to :ref:`write a series of related modules ` that integrate Ansible with a new product (for example, a database, cloud provider, network platform, and so on). + +* I want to refine my code: + + * I want to :ref:`debug my module code `. + * I want to :ref:`add tests `. + * I want to :ref:`document my module `. + * I want to :ref:`document my set of modules for a network platform `. + * I want to follow :ref:`conventions and tips for clean, usable module code `. + * I want to :ref:`make sure my code runs on Python 2 and Python 3 `. + +* I want to work on other development projects: + + * I want to :ref:`write a plugin `. + * I want to :ref:`connect Ansible to a new source of inventory `. + * I want to :ref:`deprecate an outdated module `. + +* I want to contribute back to the Ansible project: + + * I want to :ref:`understand how to contribute to Ansible `. + * I want to :ref:`contribute my module or plugin `. + * I want to :ref:`understand the license agreement ` for contributions to Ansible. + +If you prefer to read the entire guide, here's a list of the pages in order. + +.. 
toctree:: + :maxdepth: 2 + + developing_locally + developing_modules + developing_modules_general + developing_modules_checklist + developing_modules_best_practices + developing_python_3 + debugging + developing_modules_documenting + developing_modules_general_windows + developing_modules_general_aci + platforms/aws_guidelines + platforms/openstack_guidelines + platforms/ovirt_dev_guide + platforms/vmware_guidelines + developing_modules_in_groups + testing + module_lifecycle + developing_plugins + developing_inventory + developing_core + developing_program_flow_modules + developing_api + developing_rebasing + developing_module_utilities + developing_collections + migrating_roles + collections_galaxy_meta + overview_architecture diff --git a/docs/docsite/rst/dev_guide/migrating_roles.rst b/docs/docsite/rst/dev_guide/migrating_roles.rst new file mode 100644 index 00000000..a32fa242 --- /dev/null +++ b/docs/docsite/rst/dev_guide/migrating_roles.rst @@ -0,0 +1,410 @@ + +.. _migrating_roles: + +************************************************* +Migrating Roles to Roles in Collections on Galaxy +************************************************* + +You can migrate any existing standalone role into a collection and host the collection on Galaxy. With Ansible collections, you can distribute many roles in a single cohesive unit of re-usable automation. Inside a collection, you can share custom plugins across all roles in the collection instead of duplicating them in each role's :file:`library/` directory. + +You must migrate roles to collections if you want to distribute them as certified Ansible content. + +.. note:: + + If you want to import your collection to Galaxy, you need a `Galaxy namespace `_. + +See :ref:`developing_collections` for details on collections. + + +..
contents:: + :local: + :depth: 1 + +Comparing standalone roles to collection roles +=============================================== + +:ref:`Standalone roles ` have the following directory structure: + +.. code-block:: bash + :emphasize-lines: 5,7,8 + + role/ + ├── defaults + ├── files + ├── handlers + ├── library + ├── meta + ├── module_utils + ├── [*_plugins] + ├── tasks + ├── templates + ├── tests + └── vars + + +The highlighted directories above will change when you migrate to a collection-based role. The collection directory structure includes a :file:`roles/` directory: + +.. code-block:: bash + + mynamespace/ + └── mycollection/ + ├── docs/ + ├── galaxy.yml + ├── plugins/ + │ ├── modules/ + │ │ └── module1.py + │ ├── inventory/ + │ └── .../ + ├── README.md + ├── roles/ + │ ├── role1/ + │ ├── role2/ + │ └── .../ + ├── playbooks/ + │ ├── files/ + │ ├── vars/ + │ ├── templates/ + │ └── tasks/ + └── tests/ + +You will need to use the Fully Qualified Collection Name (FQCN) to use the roles and plugins when you migrate your role into a collection. The FQCN is the combination of the collection ``namespace``, collection ``name``, and the content item you are referring to. + +So for example, in the above collection, the FQCN to access ``role1`` would be: + +.. code-block:: Python + + mynamespace.mycollection.role1 + + +A collection can contain one or more roles in the :file:`roles/` directory and these are almost identical to standalone roles, except you need to move plugins out of the individual roles, and use the :abbr:`FQCN (Fully Qualified Collection Name)` in some places, as detailed in the next section. + +.. note:: + + In standalone roles, some of the plugin directories referenced their plugin types in the plural sense; this is not the case in collections. + +.. _simple_roles_in_collections: + +Migrating a role to a collection +================================= + +To migrate from a standalone role that contains no plugins to a collection role: + +1. 
Create a local :file:`ansible_collections` directory and ``cd`` to this new directory. + +2. Create a collection. If you want to import this collection to Ansible Galaxy, you need a `Galaxy namespace `_. + +.. code-block:: bash + + $ ansible-galaxy collection init mynamespace.mycollection + +This creates the collection directory structure. + +3. Copy the standalone role directory into the :file:`roles/` subdirectory of the collection. Roles in collections cannot have hyphens in the role name. Rename any such roles to use underscores instead. + +.. code-block:: bash + + $ mkdir mynamespace/mycollection/roles/my_role/ + $ cp -r /path/to/standalone/role/mynamespace/my_role/\* mynamespace/mycollection/roles/my_role/ + +4. Update ``galaxy.yml`` to include any role dependencies. + +5. Update the collection README.md file to add links to any role README.md files. + + +.. _complex_roles_in_collections: + +Migrating a role with plugins to a collection +============================================== + +To migrate from a standalone role that has plugins to a collection role: + +1. Create a local :file:`ansible_collections directory` and ``cd`` to this new directory. + +2. Create a collection. If you want to import this collection to Ansible Galaxy, you need a `Galaxy namespace `_. + +.. code-block:: bash + + $ ansible-galaxy collection init mynamespace.mycollection + +This creates the collection directory structure. + +3. Copy the standalone role directory into the :file:`roles/` subdirectory of the collection. Roles in collections cannot have hyphens in the role name. Rename any such roles to use underscores instead. + +.. code-block:: bash + + $ mkdir mynamespace/mycollection/roles/my_role/ + $ cp -r /path/to/standalone/role/mynamespace/my_role/\* mynamespace/mycollection/roles/my_role/ + + +4. Move any modules to the :file:`plugins/modules/` directory. + +.. 
code-block:: bash + + $ mv -r mynamespace/mycollection/roles/my_role/library/\* mynamespace/mycollection/plugins/modules/ + +5. Move any other plugins to the appropriate :file:`plugins/PLUGINTYPE/` directory. See :ref:`migrating_plugins_collection` for additional steps that may be required. + +6. Update ``galaxy.yml`` to include any role dependencies. + +7. Update the collection README.md file to add links to any role README.md files. + +8. Change any references to the role to use the :abbr:`FQCN (Fully Qualified Collection Name)`. + +.. code-block:: yaml + + --- + - name: example role by FQCN + hosts: some_host_pattern + tasks: + - name: import FQCN role from a collection + import_role: + name: mynamespace.mycollection.my_role + + +You can alternately use the ``collections`` keyword to simplify this: + +.. code-block:: yaml + + --- + - name: example role by FQCN + hosts: some_host_pattern + collections: + - mynamespace.mycollection + tasks: + - name: import role from a collection + import_role: + name: my_role + + +.. _migrating_plugins_collection: + +Migrating other role plugins to a collection +--------------------------------------------- + +To migrate other role plugins to a collection: + + +1. Move each nonmodule plugins to the appropriate :file:`plugins/PLUGINTYPE/` directory. The :file:`mynamespace/mycollection/plugins/README.md` file explains the types of plugins that the collection can contain within optionally created subdirectories. + +.. code-block:: bash + + $ mv -r mynamespace/mycollection/roles/my_role/filter_plugins/\* mynamespace/mycollection/plugins/filter/ + +2. Update documentation to use the FQCN. Plugins that use ``doc_fragments`` need to use FQCN (for example, ``mydocfrag`` becomes ``mynamespace.mycollection.mydocfrag``). + +3. Update relative imports work in collections to start with a period. For example, :file:`./filename` and :file:`../asdfu/filestuff` works but :file:`filename` in same directory must be updated to :file:`./filename`. 
+ + +If you have a custom ``module_utils`` or import from ``__init__.py``, you must also: + +#. Change the Python namespace for custom ``module_utils`` to use the :abbr:`FQCN (Fully Qualified Collection Name)` along with the ``ansible_collections`` convention. See :ref:`update_module_utils_role`. + +#. Change how you import from ``__init__.py``. See :ref:`update_init_role`. + + +.. _update_module_utils_role: + +Updating ``module_utils`` +^^^^^^^^^^^^^^^^^^^^^^^^^ + +If any of your custom modules use a custom module utility, once you migrate to a collection you cannot address the module utility in the top level ``ansible.module_utils`` Python namespace. Ansible does not merge content from collections into the the Ansible internal Python namespace. Update any Python import statements that refer to custom module utilities when you migrate your custom content to collections. See :ref:`module_utils in collections ` for more details. + +When coding with ``module_utils`` in a collection, the Python import statement needs to take into account the :abbr:`FQCN (Fully Qualified Collection Name)` along with the ``ansible_collections`` convention. The resulting Python import looks similar to the following example: + +.. code-block:: text + + from ansible_collections.{namespace}.{collectionname}.plugins.module_utils.{util} import {something} + +.. note:: + + You need to follow the same rules in changing paths and using namespaced names for subclassed plugins. + +The following example code snippets show a Python and a PowerShell module using both default Ansible ``module_utils`` and those provided by a collection. In this example the namespace is ``ansible_example`` and the collection is ``community``. + +In the Python example the ``module_utils`` is ``helper`` and the :abbr:`FQCN (Fully Qualified Collection Name)` is ``ansible_example.community.plugins.module_utils.helper``: + +.. 
code-block:: text + + from ansible.module_utils.basic import AnsibleModule + from ansible.module_utils._text import to_text + from ansible.module_utils.six.moves.urllib.parse import urlencode + from ansible.module_utils.six.moves.urllib.error import HTTPError + from ansible_collections.ansible_example.community.plugins.module_utils.helper import HelperRequest + + argspec = dict( + name=dict(required=True, type='str'), + state=dict(choices=['present', 'absent'], required=True), + ) + + module = AnsibleModule( + argument_spec=argspec, + supports_check_mode=True + ) + + _request = HelperRequest( + module, + headers={"Content-Type": "application/json"}, + data=data + ) + +In the PowerShell example the ``module_utils`` is ``hyperv`` and the :abbr:`FQCN (Fully Qualified Collection Name)` is ``ansible_example.community.plugins.module_utils.hyperv``: + +.. code-block:: powershell + + #!powershell + #AnsibleRequires -CSharpUtil Ansible.Basic + #AnsibleRequires -PowerShell ansible_collections.ansible_example.community.plugins.module_utils.hyperv + + $spec = @{ + name = @{ required = $true; type = "str" } + state = @{ required = $true; choices = @("present", "absent") } + } + $module = [Ansible.Basic.AnsibleModule]::Create($args, $spec) + + Invoke-HyperVFunction -Name $module.Params.name + + $module.ExitJson() + + +.. _update_init_role: + +Importing from __init__.py +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Because of the way that the CPython interpreter does imports, combined with the way the Ansible plugin loader works, if your custom embedded module or plugin requires importing something from an :file:`__init__.py` file, that also becomes part of your collection. You can either originate the content inside a standalone role or use the file name in the Python import statement. The following example is an :file:`__init__.py` file that is part of a callback plugin found inside a collection named ``ansible_example.community``. + +.. 
code-block:: python + + from ansible_collections.ansible_example.community.plugins.callback.__init__ import CustomBaseClass + + +Example: Migrating a standalone role with plugins to a collection +----------------------------------------------------------------- + +In this example we have a standalone role called ``my-standalone-role.webapp`` to emulate a standalone role that contains dashes in the name (which is not valid in collections). This standalone role contains a custom module in the ``library/`` directory called ``manage_webserver``. + +.. code-block:: bash + + my-standalone-role.webapp + ├── defaults + ├── files + ├── handlers + ├── library + ├── meta + ├── tasks + ├── templates + ├── tests + └── vars + +1. Create a new collection, for example, ``acme.webserver``: + +.. code-block:: bash + + $ ansible-galaxy collection init acme.webserver + - Collection acme.webserver was created successfully + $ tree acme -d 1 + acme + └── webserver + ├── docs + ├── plugins + └── roles + +2. Create the ``webapp`` role inside the collection and copy all contents from the standalone role: + +.. code-block:: bash + + $ mkdir acme/webserver/roles/webapp + $ cp my-standalone-role.webapp/* acme/webserver/roles/webapp/ + +3. Move the ``manage_webserver`` module to its new home in ``acme/webserver/plugins/modules/``: + +.. code-block:: bash + + $ cp my-standalone-role.webapp/library/manage_webserver.py acme/webserver/plugins/modules/manage.py + +.. note:: + + This example changed the original source file ``manage_webserver.py`` to the destination file ``manage.py``. This is optional but the :abbr:`FQCN (Fully Qualified Collection Name)` provides the ``webserver`` context as ``acme.webserver.manage``. + +4. Change ``manage_webserver`` to ``acme.webserver.manage`` in :file:`tasks/` files in the role ( for example, ``my-standalone-role.webapp/tasks/main.yml``) and any use of the original module name. + +.. 
note:: + + This name change is only required if you changed the original module name, but illustrates content referenced by :abbr:`FQCN (Fully Qualified Collection Name)` can offer context and in turn can make module and plugin names shorter. If you anticipate using these modules independent of the role, keep the original naming conventions. Users can add the :ref:`collections keyword ` in their playbooks. Typically roles are an abstraction layer and users won't use components of the role independently. + + +Example: Supporting standalone roles and migrated collection roles in a downstream RPM +--------------------------------------------------------------------------------------- + +A standalone role can co-exist with its collection role counterpart (for example, as part of a support lifecycle of a product). This should only be done for a transition period, but these two can exist in downstream in packages such as RPMs. For example, the RHEL system roles could coexist with an `example of a RHEL system roles collection `_ and provide existing backwards compatibility with the downstream RPM. + +This section walks through an example creating this coexistence in a downstream RPM and requires Ansible 2.9.0 or later. + +To deliver a role as both a standalone role and a collection role: + +#. Place the collection in :file:`/usr/share/ansible/collections/ansible_collections/`. +#. Copy the contents of the role inside the collection into a directory named after the standalone role and place the standalone role in :file:`/usr/share/ansible/roles/`. + +All previously bundled modules and plugins used in the standalone role are now referenced by :abbr:`FQCN (Fully Qualified Collection Name)` so even though they are no longer embedded, they can be found from the collection contents.This is an example of how the content inside the collection is a unique entity and does not have to be bound to a role or otherwise. 
You could alternately create two separate collections: one for the modules and plugins and another for the standalone role to migrate to. The role must use the modules and plugins as :abbr:`FQCN (Fully Qualified Collection Name)`. + +The following is an example RPM spec file that accomplishes this using this example content: + +.. code-block:: text + + Name: acme-ansible-content + Summary: Ansible Collection for deploying and configuring ACME webapp + Version: 1.0.0 + Release: 1%{?dist} + License: GPLv3+ + Source0: amce-webserver-1.0.0.tar.gz + + Url: https://github.com/acme/webserver-ansible-collection + BuildArch: noarch + + %global roleprefix my-standalone-role. + %global collection_namespace acme + %global collection_name webserver + + %global collection_dir %{_datadir}/ansible/collections/ansible_collections/%{collection_namespace}/%{collection_name} + + %description + Ansible Collection and standalone role (for backward compatibility and migration) to deploy, configure, and manage the ACME webapp software. 
+ + %prep + %setup -qc + + %build + + %install + + mkdir -p %{buildroot}/%{collection_dir} + cp -r ./* %{buildroot}/%{collection_dir}/ + + mkdir -p %{buildroot}/%{_datadir}/ansible/roles + for role in %{buildroot}/%{collection_dir}/roles/* + do + cp -pR ${role} %{buildroot}/%{_datadir}/ansible/roles/%{roleprefix}$(basename ${role}) + + mkdir -p %{buildroot}/%{_pkgdocdir}/$(basename ${role}) + for docfile in README.md COPYING LICENSE + do + if [ -f ${role}/${docfile} ] + then + cp -p ${role}/${docfile} %{buildroot}/%{_pkgdocdir}/$(basename ${role})/${docfile} + fi + done + done + + + %files + %dir %{_datadir}/ansible + %dir %{_datadir}/ansible/roles + %dir %{_datadir}/ansible/collections + %dir %{_datadir}/ansible/collections/ansible_collections + %{_datadir}/ansible/roles/ + %doc %{_pkgdocdir}/*/README.md + %doc %{_datadir}/ansible/roles/%{roleprefix}*/README.md + %{collection_dir} + %doc %{collection_dir}/roles/*/README.md + %license %{_pkgdocdir}/*/COPYING + %license %{_pkgdocdir}/*/LICENSE diff --git a/docs/docsite/rst/dev_guide/module_lifecycle.rst b/docs/docsite/rst/dev_guide/module_lifecycle.rst new file mode 100644 index 00000000..1201fffa --- /dev/null +++ b/docs/docsite/rst/dev_guide/module_lifecycle.rst @@ -0,0 +1,50 @@ +.. _module_lifecycle: + +********************************** +The lifecycle of an Ansible module +********************************** + +Modules in the main Ansible repo have a defined life cycle, from first introduction to final removal. The module life cycle is tied to the `Ansible release cycle `. +A module may move through these four states: + +1. When a module is first accepted into Ansible, we consider it in tech preview and will mark it as such in the documentation. + +2. If a module matures, we will remove the 'preview' mark in the documentation. We support (though we cannot guarantee) backwards compatibility for these modules, which means their parameters should be maintained with stable meanings. + +3. 
If a module's target API changes radically, or if someone creates a better implementation of its functionality, we may mark it deprecated. Modules that are deprecated are still available but they are reaching the end of their life cycle. We retain deprecated modules for 4 release cycles with deprecation warnings to help users update playbooks and roles that use them. + +4. When a module has been deprecated for four release cycles, we remove the code and mark the stub file removed. Modules that are removed are no longer shipped with Ansible. The stub file helps users find alternative modules. + +.. _deprecating_modules: + +Deprecating modules +=================== + +To deprecate a module, you must: + +1. Rename the file so it starts with an ``_``, for example, rename ``old_cloud.py`` to ``_old_cloud.py``. This keeps the module available and marks it as deprecated on the module index pages. +2. Mention the deprecation in the relevant ``CHANGELOG``. +3. Reference the deprecation in the relevant ``porting_guide_x.y.rst``. +4. Add ``deprecated:`` to the documentation with the following sub-values: + + :removed_in: A ``string``, such as ``"2.10"``; the version of Ansible where the module will be replaced with a docs-only module stub. Usually current release +4. Mutually exclusive with :removed_by_date:. + :remove_by_date: (Added in Ansible 2.10). An ISO 8601 formatted date when the module will be removed. Usually 2 years from the date the module is deprecated. Mutually exclusive with :removed_in:. + :why: Optional string that used to detail why this has been removed. + :alternative: Inform users they should do instead, for example, ``Use M(whatmoduletouseinstead) instead.``. + +* note: with the advent of collections and ``routing.yml`` we might soon require another entry in this file to mark the deprecation. + +* For an example of documenting deprecation, see this `PR that deprecates multiple modules `_. + Some of the elements in the PR might now be out of date. 
+ +Changing a module name +====================== + +You can also rename a module and keep an alias to the old name by using a symlink that starts with _. +This example allows the ``stat`` module to be called with ``fileinfo``, making the following examples equivalent:: + + EXAMPLES = ''' + ln -s stat.py _fileinfo.py + ansible -m stat -a "path=/tmp" localhost + ansible -m fileinfo -a "path=/tmp" localhost + ''' diff --git a/docs/docsite/rst/dev_guide/overview_architecture.rst b/docs/docsite/rst/dev_guide/overview_architecture.rst new file mode 100644 index 00000000..fdd90625 --- /dev/null +++ b/docs/docsite/rst/dev_guide/overview_architecture.rst @@ -0,0 +1,149 @@ +******************** +Ansible architecture +******************** + +Ansible is a radically simple IT automation engine that automates cloud provisioning, configuration management, application deployment, intra-service orchestration, and many other IT needs. + +Being designed for multi-tier deployments since day one, Ansible models your IT infrastructure by describing how all of your systems inter-relate, rather than just managing one system at a time. + +It uses no agents and no additional custom security infrastructure, so it's easy to deploy - and most importantly, it uses a very simple language (YAML, in the form of Ansible Playbooks) that allow you to describe your automation jobs in a way that approaches plain English. + +In this section, we'll give you a really quick overview of how Ansible works so you can see how the pieces fit together. + +.. contents:: + :local: + +Modules +======= + +Ansible works by connecting to your nodes and pushing out scripts called "Ansible modules" to them. Most modules accept parameters that describe the desired state of the system. +Ansible then executes these modules (over SSH by default), and removes them when finished. Your library of modules can reside on any machine, and there are no servers, daemons, or databases required. 
+ +You can :ref:`write your own modules `, though you should first consider :ref:`whether you should `. Typically you'll work with your favorite terminal program, a text editor, and probably a version control system to keep track of changes to your content. You may write specialized modules in any language that can return JSON (Ruby, Python, bash, and so on). + +Module utilities +================ + +When multiple modules use the same code, Ansible stores those functions as module utilities to minimize duplication and maintenance. For example, the code that parses URLs is ``lib/ansible/module_utils/url.py``. You can :ref:`write your own module utilities ` as well. Module utilities may only be written in Python or in PowerShell. + +Plugins +======= + +:ref:`Plugins ` augment Ansible's core functionality. While modules execute on the target system in separate processes (usually that means on a remote system), plugins execute on the control node within the ``/usr/bin/ansible`` process. Plugins offer options and extensions for the core features of Ansible - transforming data, logging output, connecting to inventory, and more. Ansible ships with a number of handy plugins, and you can easily :ref:`write your own `. For example, you can write an :ref:`inventory plugin ` to connect to any datasource that returns JSON. Plugins must be written in Python. + +Inventory +========= + +By default, Ansible represents the machines it manages in a file (INI, YAML, and so on) that puts all of your managed machines in groups of your own choosing. + +To add new machines, there is no additional SSL signing server involved, so there's never any hassle deciding why a particular machine didn't get linked up due to obscure NTP or DNS issues. + +If there's another source of truth in your infrastructure, Ansible can also connect to that. Ansible can draw inventory, group, and variable information from sources like EC2, Rackspace, OpenStack, and more. 
+ +Here's what a plain text inventory file looks like:: + + --- + [webservers] + www1.example.com + www2.example.com + + [dbservers] + db0.example.com + db1.example.com + +Once inventory hosts are listed, variables can be assigned to them in simple text files (in a subdirectory called 'group_vars/' or 'host_vars/' or directly in the inventory file. + +Or, as already mentioned, use a dynamic inventory to pull your inventory from data sources like EC2, Rackspace, or OpenStack. + +Playbooks +========= + +Playbooks can finely orchestrate multiple slices of your infrastructure topology, with very detailed control over how many machines to tackle at a time. This is where Ansible starts to get most interesting. + +Ansible's approach to orchestration is one of finely-tuned simplicity, as we believe your automation code should make perfect sense to you years down the road and there should be very little to remember about special syntax or features. + +Here's what a simple playbook looks like:: + + --- + - hosts: webservers + serial: 5 # update 5 machines at a time + roles: + - common + - webapp + + - hosts: content_servers + roles: + - common + - content + +.. _ansible_search_path: + +The Ansible search path +======================= + +Modules, module utilities, plugins, playbooks, and roles can live in multiple locations. If you +write your own code to extend Ansible's core features, you may have multiple files with similar or the same names in different locations on your Ansible control node. The search path determines which of these files Ansible will discover and use on any given playbook run. + +Ansible's search path grows incrementally over a run. As +Ansible finds each playbook and role included in a given run, it appends +any directories related to that playbook or role to the search path. Those +directories remain in scope for the duration of the run, even after the playbook or role +has finished executing. 
Ansible loads modules, module utilities, and plugins in this order: + +1. Directories adjacent to a playbook specified on the command line. If you run Ansible with ``ansible-playbook /path/to/play.yml``, Ansible appends these directories if they exist: + + .. code-block:: bash + + /path/to/modules + /path/to/module_utils + /path/to/plugins + +2. Directories adjacent to a playbook that is statically imported by a + playbook specified on the command line. If ``play.yml`` includes + ``- import_playbook: /path/to/subdir/play1.yml``, Ansible appends these directories if they exist: + + .. code-block:: bash + + /path/to/subdir/modules + /path/to/subdir/module_utils + /path/to/subdir/plugins + +3. Subdirectories of a role directory referenced by a playbook. If + ``play.yml`` runs ``myrole``, Ansible appends these directories if they exist: + + .. code-block:: bash + + /path/to/roles/myrole/modules + /path/to/roles/myrole/module_utils + /path/to/roles/myrole/plugins + +4. Directories specified as default paths in ``ansible.cfg`` or by the related + environment variables, including the paths for the various plugin types. See :ref:`ansible_configuration_settings` for more information. + Sample ``ansible.cfg`` fields: + + .. code-block:: bash + + DEFAULT_MODULE_PATH + DEFAULT_MODULE_UTILS_PATH + DEFAULT_CACHE_PLUGIN_PATH + DEFAULT_FILTER_PLUGIN_PATH + + Sample environment variables: + + .. code-block:: bash + + ANSIBLE_LIBRARY + ANSIBLE_MODULE_UTILS + ANSIBLE_CACHE_PLUGINS + ANSIBLE_FILTER_PLUGINS + +5. The standard directories that ship as part of the Ansible distribution. + +.. caution:: + + Modules, module utilities, and plugins in user-specified directories will + override the standard versions. This includes some files with generic names. + For example, if you have a file named ``basic.py`` in a user-specified + directory, it will override the standard ``ansible.module_utils.basic``. 
+ + If you have more than one module, module utility, or plugin with the same name in different user-specified directories, the order of commands at the command line and the order of includes and roles in each play will affect which one is found and used on that particular play. diff --git a/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst b/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst new file mode 100644 index 00000000..acce3de4 --- /dev/null +++ b/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst @@ -0,0 +1,754 @@ +.. _AWS_module_development: + +**************************************************** +Guidelines for Ansible Amazon AWS module development +**************************************************** + +The Ansible AWS collection (on `Galaxy `_, source code `repository `_) is maintained by the Ansible AWS Working Group. For further information see the `AWS working group community page `_. If you are planning to contribute AWS modules to Ansible then getting in touch with the working group is a good way to start, especially because a similar module may already be under development. + +.. contents:: + :local: + +Maintaining existing modules +============================ + +Fixing bugs +----------- + +Bug fixes to code that relies on boto will still be accepted. When possible, +the code should be ported to use boto3. + +Adding new features +------------------- + +Try to keep backward compatibility with relatively recent versions of boto3. That means that if you +want to implement some functionality that uses a new feature of boto3, it should only fail if that +feature actually needs to be run, with a message stating the missing feature and minimum required +version of boto3. + +Use feature testing (for example, ``hasattr('boto3.module', 'shiny_new_method')``) to check whether boto3 +supports a feature rather than version checking. For example, from the ``ec2`` module: + +.. 
code-block:: python + + if boto_supports_profile_name_arg(ec2): + params['instance_profile_name'] = instance_profile_name + else: + if instance_profile_name is not None: + module.fail_json(msg="instance_profile_name parameter requires boto version 2.5.0 or higher") + +Migrating to boto3 +------------------ + +Prior to Ansible 2.0, modules were written in either boto3 or boto. We are +still porting some modules to boto3. Modules that still require boto should be ported to use boto3 rather than using both libraries (boto and boto3). We would like to remove the boto dependency from all modules. + +Porting code to AnsibleAWSModule +--------------------------------- + +Some old AWS modules use the generic ``AnsibleModule`` as a base rather than the more efficient ``AnsibleAWSModule``. To port an old module to ``AnsibleAWSModule``, change: + +.. code-block:: python + + from ansible.module_utils.basic import AnsibleModule + ... + module = AnsibleModule(...) + +to: + +.. code-block:: python + + from ansible.module_utils.aws.core import AnsibleAWSModule + ... + module = AnsibleAWSModule(...) + +Few other changes are required. AnsibleAWSModule +does not inherit methods from AnsibleModule by default, but most useful methods +are included. If you do find an issue, please raise a bug report. + +When porting, keep in mind that AnsibleAWSModule also will add the default ec2 +argument spec by default. In pre-port modules, you should see common arguments +specified with: + +.. code-block:: python + + def main(): + argument_spec = ec2_argument_spec() + argument_spec.update(dict( + state=dict(default='present', choices=['present', 'absent', 'enabled', 'disabled']), + name=dict(default='default'), + # ... and so on ... + )) + module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True,) + +These can be replaced with: + +.. 
code-block:: python + + def main(): + argument_spec = dict( + state=dict(default='present', choices=['present', 'absent', 'enabled', 'disabled']), + name=dict(default='default'), + # ... and so on ... + ) + module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True,) + +Creating new AWS modules +======================== + +Use boto3 and AnsibleAWSModule +------------------------------- + +All new AWS modules must use boto3 and ``AnsibleAWSModule``. + +``AnsibleAWSModule`` greatly simplifies exception handling and library +management, reducing the amount of boilerplate code. If you cannot +use ``AnsibleAWSModule`` as a base, you must document the reason and request an exception to this rule. + +Naming your module +------------------ + +Base the name of the module on the part of AWS that you actually use. (A good rule of thumb is to +take whatever module you use with boto as a starting point). Don't further abbreviate names - if +something is a well known abbreviation of a major component of AWS (for example, VPC or ELB), that's fine, but +don't create new ones independently. + +Unless the name of your service is quite unique, please consider using ``aws_`` as a prefix. For example ``aws_lambda``. + +Importing botocore and boto3 +---------------------------- + +The ``ansible.module_utils.ec2`` module and ``ansible.module_utils.core.aws`` modules both +automatically import boto3 and botocore. If boto3 is missing from the system then the variable +``HAS_BOTO3`` will be set to false. Normally, this means that modules don't need to import +boto3 directly. There is no need to check ``HAS_BOTO3`` when using AnsibleAWSModule +as the module does that check: + +.. code-block:: python + + from ansible.module_utils.aws.core import AnsibleAWSModule + try: + import botocore + except ImportError: + pass # handled by AnsibleAWSModule + +or: + +.. 
code-block:: python + + from ansible.module_utils.basic import AnsibleModule + from ansible.module_utils.ec2 import HAS_BOTO3 + try: + import botocore + except ImportError: + pass # handled by imported HAS_BOTO3 + + def main(): + + if not HAS_BOTO3: + module.fail_json(msg='boto3 and botocore are required for this module') + +Supporting Module Defaults +-------------------------- + +The existing AWS modules support using :ref:`module_defaults ` for common +authentication parameters. To do the same for your new module, add an entry for it in +``lib/ansible/config/module_defaults.yml``. These entries take the form of: + +.. code-block:: yaml + + aws_module_name: + - aws + +Connecting to AWS +================= + +AnsibleAWSModule provides the ``resource`` and ``client`` helper methods for obtaining boto3 connections. +These handle some of the more esoteric connection options, such as security tokens and boto profiles. + +If using the basic AnsibleModule then you should use ``get_aws_connection_info`` and then ``boto3_conn`` +to connect to AWS as these handle the same range of connection options. + +These helpers also for missing profiles or a region not set when it needs to be, so you don't have to. + +An example of connecting to ec2 is shown below. Note that unlike boto there is no ``NoAuthHandlerFound`` +exception handling like in boto. Instead, an ``AuthFailure`` exception will be thrown when you use the +connection. To ensure that authorization, parameter validation and permissions errors are all caught, +you should catch ``ClientError`` and ``BotoCoreError`` exceptions with every boto3 connection call. +See exception handling: + +.. code-block:: python + + module.client('ec2') + +or for the higher level ec2 resource: + +.. code-block:: python + + module.resource('ec2') + + +An example of the older style connection used for modules based on AnsibleModule rather than AnsibleAWSModule: + +.. 
code-block:: python + + region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True) + connection = boto3_conn(module, conn_type='client', resource='ec2', region=region, endpoint=ec2_url, **aws_connect_params) + +.. code-block:: python + + region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True) + connection = boto3_conn(module, conn_type='client', resource='ec2', region=region, endpoint=ec2_url, **aws_connect_params) + + +Common Documentation Fragments for Connection Parameters +-------------------------------------------------------- + +There are two :ref:`common documentation fragments ` +that should be included into almost all AWS modules: + +* ``aws`` - contains the common boto connection parameters +* ``ec2`` - contains the common region parameter required for many AWS modules + +These fragments should be used rather than re-documenting these properties to ensure consistency +and that the more esoteric connection options are documented. For example: + +.. code-block:: python + + DOCUMENTATION = ''' + module: my_module + # some lines omitted here + requirements: [ 'botocore', 'boto3' ] + extends_documentation_fragment: + - aws + - ec2 + ''' + +Handling exceptions +=================== + +You should wrap any boto3 or botocore call in a try block. If an exception is thrown, then there +are a number of possibilities for handling it. + +* Catch the general ``ClientError`` or look for a specific error code with + ``is_boto3_error_code``. +* Use ``aws_module.fail_json_aws()`` to report the module failure in a standard way +* Retry using AWSRetry +* Use ``fail_json()`` to report the failure without using ``ansible.module_utils.aws.core`` +* Do something custom in the case where you know how to handle the exception + +For more information on botocore exception handling see the `botocore error documentation `_. 
+ +Using is_boto3_error_code +------------------------- + +To use ``ansible.module_utils.aws.core.is_boto3_error_code`` to catch a single +AWS error code, call it in place of ``ClientError`` in your except clauses. In +this case, *only* the ``InvalidGroup.NotFound`` error code will be caught here, +and any other error will be raised for handling elsewhere in the program. + +.. code-block:: python + + try: + info = connection.describe_security_groups(**kwargs) + except is_boto3_error_code('InvalidGroup.NotFound'): + pass + do_something(info) # do something with the info that was successfully returned + +Using fail_json_aws() +--------------------- + +In the AnsibleAWSModule there is a special method, ``module.fail_json_aws()`` for nice reporting of +exceptions. Call this on your exception and it will report the error together with a traceback for +use in Ansible verbose mode. + +You should use the AnsibleAWSModule for all new modules, unless not possible. If adding significant +amounts of exception handling to existing modules, we recommend migrating the module to use AnsibleAWSModule +(there are very few changes required to do this) + +.. code-block:: python + + from ansible.module_utils.aws.core import AnsibleAWSModule + + # Set up module parameters + # module params code here + + # Connect to AWS + # connection code here + + # Make a call to AWS + name = module.params.get['name'] + try: + result = connection.describe_frooble(FroobleName=name) + except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e: + module.fail_json_aws(e, msg="Couldn't obtain frooble %s" % name) + +Note that it should normally be acceptable to catch all normal exceptions here, however if you +expect anything other than botocore exceptions you should test everything works as expected. + +If you need to perform an action based on the error boto3 returned, use the error code. + +.. 
code-block:: python + + # Make a call to AWS + name = module.params.get['name'] + try: + result = connection.describe_frooble(FroobleName=name) + except botocore.exceptions.ClientError as e: + if e.response['Error']['Code'] == 'FroobleNotFound': + workaround_failure() # This is an error that we can work around + else: + module.fail_json_aws(e, msg="Couldn't obtain frooble %s" % name) + except botocore.exceptions.BotoCoreError as e: + module.fail_json_aws(e, msg="Couldn't obtain frooble %s" % name) + +using fail_json() and avoiding ansible.module_utils.aws.core +------------------------------------------------------------ + +Boto3 provides lots of useful information when an exception is thrown so pass this to the user +along with the message. + +.. code-block:: python + + from ansible.module_utils.ec2 import HAS_BOTO3 + try: + import botocore + except ImportError: + pass # caught by imported HAS_BOTO3 + + # Connect to AWS + # connection code here + + # Make a call to AWS + name = module.params.get['name'] + try: + result = connection.describe_frooble(FroobleName=name) + except botocore.exceptions.ClientError as e: + module.fail_json(msg="Couldn't obtain frooble %s: %s" % (name, str(e)), + exception=traceback.format_exc(), + **camel_dict_to_snake_dict(e.response)) + +Note: we use `str(e)` rather than `e.message` as the latter doesn't +work with python3 + +If you need to perform an action based on the error boto3 returned, use the error code. + +.. 
code-block:: python + + # Make a call to AWS + name = module.params.get['name'] + try: + result = connection.describe_frooble(FroobleName=name) + except botocore.exceptions.ClientError as e: + if e.response['Error']['Code'] == 'FroobleNotFound': + workaround_failure() # This is an error that we can work around + else: + module.fail_json(msg="Couldn't obtain frooble %s: %s" % (name, str(e)), + exception=traceback.format_exc(), + **camel_dict_to_snake_dict(e.response)) + except botocore.exceptions.BotoCoreError as e: + module.fail_json_aws(e, msg="Couldn't obtain frooble %s" % name) + + +API throttling (rate limiting) and pagination +============================================= + +For methods that return a lot of results, boto3 often provides +`paginators `_. If the method +you're calling has ``NextToken`` or ``Marker`` parameters, you should probably +check whether a paginator exists (the top of each boto3 service reference page has a link +to Paginators, if the service has any). To use paginators, obtain a paginator object, +call ``paginator.paginate`` with the appropriate arguments and then call ``build_full_result``. + +Any time that you are calling the AWS API a lot, you may experience API throttling, +and there is an ``AWSRetry`` decorator that can be used to ensure backoff. Because +exception handling could interfere with the retry working properly (as AWSRetry needs to +catch throttling exceptions to work correctly), you'd need to provide a backoff function +and then put exception handling around the backoff function. + +You can use ``exponential_backoff`` or ``jittered_backoff`` strategies - see +the cloud ``module_utils`` ()/lib/ansible/module_utils/cloud.py) +and `AWS Architecture blog `_ for more details. + +The combination of these two approaches is then: + +.. 
code-block:: python + + @AWSRetry.exponential_backoff(retries=5, delay=5) + def describe_some_resource_with_backoff(client, **kwargs): + paginator = client.get_paginator('describe_some_resource') + return paginator.paginate(**kwargs).build_full_result()['SomeResource'] + + def describe_some_resource(client, module): + filters = ansible_dict_to_boto3_filter_list(module.params['filters']) + try: + return describe_some_resource_with_backoff(client, Filters=filters) + except botocore.exceptions.ClientError as e: + module.fail_json_aws(e, msg="Could not describe some resource") + + +If the underlying ``describe_some_resources`` API call throws a ``ResourceNotFound`` +exception, ``AWSRetry`` takes this as a cue to retry until it's not thrown (this +is so that when creating a resource, we can just retry until it exists). + +To handle authorization failures or parameter validation errors in +``describe_some_resource_with_backoff``, where we just want to return ``None`` if +the resource doesn't exist and not retry, we need: + +.. code-block:: python + + @AWSRetry.exponential_backoff(retries=5, delay=5) + def describe_some_resource_with_backoff(client, **kwargs): + try: + return client.describe_some_resource(ResourceName=kwargs['name'])['Resources'] + except botocore.exceptions.ClientError as e: + if e.response['Error']['Code'] == 'ResourceNotFound': + return None + else: + raise + except BotoCoreError as e: + raise + + def describe_some_resource(client, module): + name = module.params.get['name'] + try: + return describe_some_resource_with_backoff(client, name=name) + except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e: + module.fail_json_aws(e, msg="Could not describe resource %s" % name) + + +To make use of AWSRetry easier, it can now be wrapped around a client returned +by ``AnsibleAWSModule``. any call from a client. To add retries to a client, +create a client: + +.. 
code-block:: python + + module.client('ec2', retry_decorator=AWSRetry.jittered_backoff(retries=10)) + +Any calls from that client can be made to use the decorator passed at call-time +using the `aws_retry` argument. By default, no retries are used. + +.. code-block:: python + + ec2 = module.client('ec2', retry_decorator=AWSRetry.jittered_backoff(retries=10)) + ec2.describe_instances(InstanceIds=['i-123456789'], aws_retry=True) + + # equivalent with normal AWSRetry + @AWSRetry.jittered_backoff(retries=10) + def describe_instances(client, **kwargs): + return ec2.describe_instances(**kwargs) + + describe_instances(module.client('ec2'), InstanceIds=['i-123456789']) + +The call will be retried the specified number of times, so the calling functions +don't need to be wrapped in the backoff decorator. + +You can also use customization for ``retries``, ``delay`` and ``max_delay`` parameters used by +``AWSRetry.jittered_backoff`` API using module params. You can take a look at +the `cloudformation ` module for example. + +To make all Amazon modules uniform, prefix the module param with ``backoff_``, so ``retries`` becomes ``backoff_retries`` + and likewise with ``backoff_delay`` and ``backoff_max_delay``. + +Returning Values +================ + +When you make a call using boto3, you will probably get back some useful information that you +should return in the module. As well as information related to the call itself, you will also have +some response metadata. It is OK to return this to the user as well as they may find it useful. + +Boto3 returns all values CamelCased. Ansible follows Python standards for variable names and uses +snake_case. There is a helper function in module_utils/ec2.py called `camel_dict_to_snake_dict` +that allows you to easily convert the boto3 response to snake_case. + +You should use this helper function and avoid changing the names of values returned by Boto3. +E.g. 
if boto3 returns a value called 'SecretAccessKey' do not change it to 'AccessKey'. + +.. code-block:: python + + # Make a call to AWS + result = connection.aws_call() + + # Return the result to the user + module.exit_json(changed=True, **camel_dict_to_snake_dict(result)) + +Dealing with IAM JSON policy +============================ + +If your module accepts IAM JSON policies then set the type to 'json' in the module spec. For +example: + +.. code-block:: python + + argument_spec.update( + dict( + policy=dict(required=False, default=None, type='json'), + ) + ) + +Note that AWS is unlikely to return the policy in the same order that is was submitted. Therefore, +use the `compare_policies` helper function which handles this variance. + +`compare_policies` takes two dictionaries, recursively sorts and makes them hashable for comparison +and returns True if they are different. + +.. code-block:: python + + from ansible.module_utils.ec2 import compare_policies + + import json + + # some lines skipped here + + # Get the policy from AWS + current_policy = json.loads(aws_object.get_policy()) + user_policy = json.loads(module.params.get('policy')) + + # Compare the user submitted policy to the current policy ignoring order + if compare_policies(user_policy, current_policy): + # Update the policy + aws_object.set_policy(user_policy) + else: + # Nothing to do + pass + +Dealing with tags +================= + +AWS has a concept of resource tags. Usually the boto3 API has separate calls for tagging and +untagging a resource. For example, the ec2 API has a create_tags and delete_tags call. + +It is common practice in Ansible AWS modules to have a `purge_tags` parameter that defaults to +true. + +The `purge_tags` parameter means that existing tags will be deleted if they are not specified by +the Ansible task. + +There is a helper function `compare_aws_tags` to ease dealing with tags. It can compare two dicts +and return the tags to set and the tags to delete. 
See the Helper function section below for more +detail. + +Helper functions +================ + +Along with the connection functions in Ansible ec2.py module_utils, there are some other useful +functions detailed below. + +camel_dict_to_snake_dict +------------------------ + +boto3 returns results in a dict. The keys of the dict are in CamelCase format. In keeping with +Ansible format, this function will convert the keys to snake_case. + +``camel_dict_to_snake_dict`` takes an optional parameter called ``ignore_list`` which is a list of +keys not to convert (this is usually useful for the ``tags`` dict, whose child keys should remain with +case preserved) + +Another optional parameter is ``reversible``. By default, ``HTTPEndpoint`` is converted to ``http_endpoint``, +which would then be converted by ``snake_dict_to_camel_dict`` to ``HttpEndpoint``. +Passing ``reversible=True`` converts HTTPEndpoint to ``h_t_t_p_endpoint`` which converts back to ``HTTPEndpoint``. + +snake_dict_to_camel_dict +------------------------ + +`snake_dict_to_camel_dict` converts snake cased keys to camel case. By default, because it was +first introduced for ECS purposes, this converts to dromedaryCase. An optional +parameter called `capitalize_first`, which defaults to `False`, can be used to convert to CamelCase. + +ansible_dict_to_boto3_filter_list +--------------------------------- + +Converts a an Ansible list of filters to a boto3 friendly list of dicts. This is useful for any +boto3 `_facts` modules. + +boto_exception +-------------- + +Pass an exception returned from boto or boto3, and this function will consistently get the message from the exception. + +Deprecated: use `AnsibleAWSModule`'s `fail_json_aws` instead. + + +boto3_tag_list_to_ansible_dict +------------------------------ + +Converts a boto3 tag list to an Ansible dict. Boto3 returns tags as a list of dicts containing keys +called 'Key' and 'Value' by default. This key names can be overridden when calling the function. 
+For example, if you have already camel_cased your list of tags you may want to pass lowercase key +names instead, in other words, 'key' and 'value'. + +This function converts the list in to a single dict where the dict key is the tag key and the dict +value is the tag value. + +ansible_dict_to_boto3_tag_list +------------------------------ + +Opposite of above. Converts an Ansible dict to a boto3 tag list of dicts. You can again override +the key names used if 'Key' and 'Value' is not suitable. + +get_ec2_security_group_ids_from_names +------------------------------------- + +Pass this function a list of security group names or combination of security group names and IDs +and this function will return a list of IDs. You should also pass the VPC ID if known because +security group names are not necessarily unique across VPCs. + +compare_policies +---------------- + +Pass two dicts of policies to check if there are any meaningful differences and returns true +if there are. This recursively sorts the dicts and makes them hashable before comparison. + +This method should be used any time policies are being compared so that a change in order +doesn't result in unnecessary changes. + +compare_aws_tags +---------------- + +Pass two dicts of tags and an optional purge parameter and this function will return a dict +containing key pairs you need to modify and a list of tag key names that you need to remove. Purge +is True by default. If purge is False then any existing tags will not be modified. + +This function is useful when using boto3 'add_tags' and 'remove_tags' functions. Be sure to use the +other helper function `boto3_tag_list_to_ansible_dict` to get an appropriate tag dict before +calling this function. 
Since the AWS APIs are not uniform (for example, EC2 is different from Lambda) this will work +without modification for some (Lambda) and others may need modification before using these values +(such as EC2, with requires the tags to unset to be in the form `[{'Key': key1}, {'Key': key2}]`). + +Integration Tests for AWS Modules +================================= + +All new AWS modules should include integration tests to ensure that any changes in AWS APIs that +affect the module are detected. At a minimum this should cover the key API calls and check the +documented return values are present in the module result. + +For general information on running the integration tests see the :ref:`Integration Tests page of the +Module Development Guide `, especially the section on configuration for cloud tests. + +The integration tests for your module should be added in `test/integration/targets/MODULE_NAME`. + +You must also have a aliases file in `test/integration/targets/MODULE_NAME/aliases`. This file serves +two purposes. First indicates it's in an AWS test causing the test framework to make AWS credentials +available during the test run. Second putting the test in a test group causing it to be run in the +continuous integration build. + +Tests for new modules should be added to the same group as existing AWS tests. In general just copy +an existing aliases file such as the `aws_s3 tests aliases file `_. + +AWS Credentials for Integration Tests +------------------------------------- + +The testing framework handles running the test with appropriate AWS credentials, these are made available +to your test in the following variables: + +* `aws_region` +* `aws_access_key` +* `aws_secret_key` +* `security_token` + +So all invocations of AWS modules in the test should set these parameters. To avoid duplicating these +for every call, it's preferable to use :ref:`module_defaults `. For example: + +.. 
code-block:: yaml + + - name: set connection information for aws modules and run tasks + module_defaults: + group/aws: + aws_access_key: "{{ aws_access_key }}" + aws_secret_key: "{{ aws_secret_key }}" + security_token: "{{ security_token | default(omit) }}" + region: "{{ aws_region }}" + + block: + + - name: Do Something + ec2_instance: + ... params ... + + - name: Do Something Else + ec2_instance: + ... params ... + +AWS Permissions for Integration Tests +------------------------------------- + +As explained in the :ref:`Integration Test guide ` +there are defined IAM policies in `mattclay/aws-terminator `_ that contain the necessary permissions +to run the AWS integration test. + +If your module interacts with a new service or otherwise requires new permissions, tests will fail when you submit a pull request and the +`Ansibullbot `_ will tag your PR as needing revision. +We do not automatically grant additional permissions to the roles used by the continuous integration builds. +You will need to raise a Pull Request against `mattclay/aws-terminator `_ to add them. + +If your PR has test failures, check carefully to be certain the failure is only due to the missing permissions. If you've ruled out other sources of failure, add a comment with the `ready_for_review` +tag and explain that it's due to missing permissions. + +Your pull request cannot be merged until the tests are passing. If your pull request is failing due to missing permissions, +you must collect the minimum IAM permissions required to +run the tests. + +There are two ways to figure out which IAM permissions you need for your PR to pass: + +* Start with the most permissive IAM policy, run the tests to collect information about which resources your tests actually use, then construct a policy based on that output. This approach only works on modules that use `AnsibleAWSModule`. 
+* Start with the least permissive IAM policy, run the tests to discover a failure, add permissions for the resource that addresses that failure, then repeat. If your module uses `AnsibleModule` instead of `AnsibleAWSModule`, you must use this approach. + +To start with the most permissive IAM policy: + +1) `Create an IAM policy `_ that allows all actions (set ``Action`` and ``Resource`` to ``*``). +2) Run your tests locally with this policy. On AnsibleAWSModule-based modules, the ``debug_botocore_endpoint_logs`` option is automatically set to ``yes``, so you should see a list of AWS ACTIONS after the PLAY RECAP showing all the permissions used. If your tests use a boto/AnsibleModule module, you must start with the least permissive policy (see below). +3) Modify your policy to allow only the actions your tests use. Restrict account, region, and prefix where possible. Wait a few minutes for your policy to update. +4) Run the tests again with a user or role that allows only the new policy. +5) If the tests fail, troubleshoot (see tips below), modify the policy, run the tests again, and repeat the process until the tests pass with a restrictive policy. +6) Open a pull request proposing the minimum required policy to the `CI policies `_. + +To start from the least permissive IAM policy: + +1) Run the integration tests locally with no IAM permissions. +2) Examine the error when the tests reach a failure. + a) If the error message indicates the action used in the request, add the action to your policy. + b) If the error message does not indicate the action used in the request: + - Usually the action is a CamelCase version of the method name - for example, for an ec2 client the method `describe_security_groups` correlates to the action `ec2:DescribeSecurityGroups`. + - Refer to the documentation to identify the action. + c) If the error message indicates the resource ARN used in the request, limit the action to that resource.
+ d) If the error message does not indicate the resource ARN used: + - Determine if the action can be restricted to a resource by examining the documentation. + - If the action can be restricted, use the documentation to construct the ARN and add it to the policy. +3) Add the action or resource that caused the failure to `an IAM policy `_. Wait a few minutes for your policy to update. +4) Run the tests again with this policy attached to your user or role. +5) If the tests still fail at the same place with the same error you will need to troubleshoot (see tips below). If the first test passes, repeat steps 2 and 3 for the next error. Repeat the process until the tests pass with a restrictive policy. +6) Open a pull request proposing the minimum required policy to the `CI policies `_. + +Troubleshooting IAM policies +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- When you make changes to a policy, wait a few minutes for the policy to update before re-running the tests. +- Use the `policy simulator `_ to verify that each action (limited by resource when applicable) in your policy is allowed. +- If you're restricting actions to certain resources, replace resources temporarily with `*`. If the tests pass with wildcard resources, there is a problem with the resource definition in your policy. +- If the initial troubleshooting above doesn't provide any more insight, AWS may be using additional undisclosed resources and actions. +- Examine the AWS FullAccess policy for the service for clues. +- Re-read the AWS documentation, especially the list of `Actions, Resources and Condition Keys `_ for the various AWS services. +- Look at the `cloudonaut `_ documentation as a troubleshooting cross-reference. +- Use a search engine. +- Ask in the Ansible IRC channel #ansible-aws (on freenode IRC). + +Unsupported Integration tests +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +There are a limited number of reasons why it may not be practical to run integration +tests for a module within CI. 
Where these apply you should add the keyword +`unsupported` to the aliases file in `test/integration/targets/MODULE_NAME/aliases`. + +Some cases where tests should be marked as unsupported: +1) The tests take longer than 10 or 15 minutes to complete +2) The tests create expensive resources +3) The tests create inline policies +4) The tests require the existance of external resources +5) The tests manage Account level security policies such as the password policy or AWS Organizations. + +Where one of these reasons apply you should open a pull request proposing the minimum required policy to the +`unsupported test policies `_. + +Unsupported integration tests will not be automatically run by CI. However, the +necessary policies should be available so that the tests can be manually run by +someone performing a PR review or writing a patch. diff --git a/docs/docsite/rst/dev_guide/platforms/openstack_guidelines.rst b/docs/docsite/rst/dev_guide/platforms/openstack_guidelines.rst new file mode 100644 index 00000000..8827cefb --- /dev/null +++ b/docs/docsite/rst/dev_guide/platforms/openstack_guidelines.rst @@ -0,0 +1,57 @@ +.. _OpenStack_module_development: + +OpenStack Ansible Modules +========================= + +The OpenStack collection (on `Galaxy `_, source code `repository `_) contains modules for interacting with OpenStack as either an admin or an end user. If the module does not begin with ``os_``, it is either deprecated or soon to be deprecated. This document serves as developer coding guidelines for modules in this collection. + +.. contents:: + :local: + +Naming +------ + +* All module names should start with ``os_`` +* Name any module that a cloud consumer would expect to use after the logical resource it manages: ``os_server`` not ``os_nova``. This naming convention acknowledges that the end user does not care which service manages the resource - that is a deployment detail. 
For example, cloud consumers may not know whether their floating IPs are managed by Nova or Neutron. +* Name any module that a cloud admin would expect to use with the service and the resource: ``os_keystone_domain``. +* If the module is one that a cloud admin and a cloud consumer could both use, + the cloud consumer rules apply. + +Interface +--------- + +* If the resource being managed has an id, it should be returned. +* If the resource being managed has an associated object more complex than + an id, it should also be returned. + +Interoperability +---------------- + +* It should be assumed that the cloud consumer does not know a bazillion + details about the deployment choices their cloud provider made, and a best + effort should be made to present one sane interface to the Ansible user + regardless of deployer insanity. +* All modules should work appropriately against all existing known public + OpenStack clouds. +* It should be assumed that a user may have more than one cloud account that + they wish to combine as part of a single Ansible-managed infrastructure. + +Libraries +--------- + +* All modules should use ``openstack_full_argument_spec`` to pick up the + standard input such as auth and ssl support. +* All modules should include ``extends_documentation_fragment: openstack``. +* All complex cloud interaction or interoperability code should be housed in + the `openstacksdk `_ + library. +* All OpenStack API interactions should happen via the openstacksdk and not via + OpenStack Client libraries. The OpenStack Client libraries do not have end + users as a primary audience; they are for intra-server communication. + +Testing +------- + +* Integration testing is currently done in `OpenStack's CI system `_ +* Testing in openstacksdk produces an obvious chicken-and-egg scenario. Work is under + way to trigger from and report on PRs directly.
diff --git a/docs/docsite/rst/dev_guide/platforms/ovirt_dev_guide.rst b/docs/docsite/rst/dev_guide/platforms/ovirt_dev_guide.rst new file mode 100644 index 00000000..bf461d40 --- /dev/null +++ b/docs/docsite/rst/dev_guide/platforms/ovirt_dev_guide.rst @@ -0,0 +1,220 @@ +.. _oVirt_module_development: + +oVirt Ansible Modules +===================== + +The set of modules for interacting with oVirt/RHV are currently part of the community.general collection (on `Galaxy `_, source code `repository `_). This document serves as developer coding guidelines for creating oVirt/RHV modules. + +.. contents:: + :local: + +Naming +------ + +- All modules should start with an ``ovirt_`` prefix. +- All modules should be named after the resource it manages in singular + form. +- All modules that gather information should have a ``_info`` + suffix. + +Interface +--------- + +- Every module should return the ID of the resource it manages. +- Every module should return the dictionary of the resource it manages. +- Never change the name of the parameter, as we guarantee backward + compatibility. Use aliases instead. +- If a parameter can't achieve idempotency for any reason, please + document it. + +Interoperability +---------------- + +- All modules should work against all minor versions of + version 4 of the API. Version 3 of the API is not supported. + +Libraries +--------- + +- All modules should use ``ovirt_full_argument_spec`` or + ``ovirt_info_full_argument_spec`` to pick up the standard input (such + as auth and ``fetch_nested``). +- All modules should use ``extends_documentation_fragment``: ovirt to go + along with ``ovirt_full_argument_spec``. +- All info modules should use ``extends_documentation_fragment``: + ``ovirt_info`` to go along with ``ovirt_info_full_argument_spec``. +- Functions that are common to all modules should be implemented in the + ``module_utils/ovirt.py`` file, so they can be reused. +- Python SDK version 4 must be used. 
+ +New module development +---------------------- + +Please read :ref:`developing_modules`, +first to know what common properties, functions and features every module must +have. + +In order to achieve idempotency of oVirt entity attributes, a helper class +was created. The first thing you need to do is to extend this class and override a few +methods: + +.. code:: python + + try: + import ovirtsdk4.types as otypes + except ImportError: + pass + + from ansible.module_utils.ovirt import ( + BaseModule, + equal + ) + + class ClustersModule(BaseModule): + + # The build method builds the entity we want to create. + # Always be sure to build only the parameters the user specified + # in their yaml file, so we don't change the values which we shouldn't + # change. If you set the parameter to None, nothing will be changed. + def build_entity(self): + return otypes.Cluster( + name=self.param('name'), + comment=self.param('comment'), + description=self.param('description'), + ) + + # The update_check method checks if the update is needed to be done on + # the entity. The equal method doesn't check the values which are None, + # which means it doesn't check the values which user didn't set in yaml. + # All other values are checked and if there is found some mismatch, + # the update method is run on the entity, the entity is build by + # 'build_entity' method. You don't have to care about calling the update, + # it's called behind the scene by the 'BaseModule' class. + def update_check(self, entity): + return ( + equal(self.param('comment'), entity.comment) + and equal(self.param('description'), entity.description) + ) + +The code above handle the check if the entity should be updated, so we +don't update the entity if not needed and also it construct the needed +entity of the SDK. + +.. 
code:: python + + from ansible.module_utils.basic import AnsibleModule + from ansible.module_utils.ovirt import ( + check_sdk, + create_connection, + ovirt_full_argument_spec, + ) + + # This module will support two states of the cluster, + # either it will be present or absent. The user can + # specify three parameters: name, comment and description, + # The 'ovirt_full_argument_spec' function, will merge the + # parameters created here with some common one like 'auth': + argument_spec = ovirt_full_argument_spec( + state=dict( + choices=['present', 'absent'], + default='present', + ), + name=dict(default=None, required=True), + description=dict(default=None), + comment=dict(default=None), + ) + + # Create the Ansible module, please always implement the + # feautre called 'check_mode', for 'create', 'update' and + # 'delete' operations it's implemented by default in BaseModule: + module = AnsibleModule( + argument_spec=argument_spec, + supports_check_mode=True, + ) + + # Check if the user has Python SDK installed: + check_sdk(module) + + try: + auth = module.params.pop('auth') + + # Create the connection to the oVirt engine: + connection = create_connection(auth) + + # Create the service which manages the entity: + clusters_service = connection.system_service().clusters_service() + + # Create the module which will handle create, update and delete flow: + clusters_module = ClustersModule( + connection=connection, + module=module, + service=clusters_service, + ) + + # Check the state and call the appropriate method: + state = module.params['state'] + if state == 'present': + ret = clusters_module.create() + elif state == 'absent': + ret = clusters_module.remove() + + # The return value of the 'create' and 'remove' method is dictionary + # with the 'id' of the entity we manage and the type of the entity + # with filled in attributes of the entity. 
The 'change' status is + # also returned by those methods: + module.exit_json(**ret) + except Exception as e: + # Modules can't raises exception, it always must exit with + # 'module.fail_json' in case of exception. Always use + # 'exception=traceback.format_exc' for debugging purposes: + module.fail_json(msg=str(e), exception=traceback.format_exc()) + finally: + # Logout only in case the user passed the 'token' in 'auth' + # parameter: + connection.close(logout=auth.get('token') is None) + +If your module must support action handling (for example, +virtual machine start) you must ensure that you handle the states of the +virtual machine correctly, and document the behavior of the +module: + +.. code:: python + + if state == 'running': + ret = vms_module.action( + action='start', + post_action=vms_module._post_start_action, + action_condition=lambda vm: ( + vm.status not in [ + otypes.VmStatus.MIGRATING, + otypes.VmStatus.POWERING_UP, + otypes.VmStatus.REBOOT_IN_PROGRESS, + otypes.VmStatus.WAIT_FOR_LAUNCH, + otypes.VmStatus.UP, + otypes.VmStatus.RESTORING_STATE, + ] + ), + wait_condition=lambda vm: vm.status == otypes.VmStatus.UP, + # Start action kwargs: + use_cloud_init=use_cloud_init, + use_sysprep=use_sysprep, + # ... + ) + +As you can see from the preceding example, the ``action`` method accepts the ``action_condition`` and +``wait_condition``, which are methods which accept the virtual machine +object as a parameter, so you can check whether the virtual +machine is in a proper state before the action. The rest of the +parameters are for the ``start`` action. You may also handle pre- +or post- action tasks by defining ``pre_action`` and ``post_action`` +parameters. + +Testing +------- + +- Integration testing is currently done in oVirt's CI system + `on Jenkins `__ + and + `on GitHub `__. +- Please consider using these integration tests if you create a new module or add a new feature to an existing + module. 
diff --git a/docs/docsite/rst/dev_guide/platforms/vmware_guidelines.rst b/docs/docsite/rst/dev_guide/platforms/vmware_guidelines.rst new file mode 100644 index 00000000..7a5c8410 --- /dev/null +++ b/docs/docsite/rst/dev_guide/platforms/vmware_guidelines.rst @@ -0,0 +1,270 @@ +.. _VMware_module_development: + +**************************************** +Guidelines for VMware module development +**************************************** + +The Ansible VMware collection (on `Galaxy `_, source code `repository `_) is maintained by the VMware Working Group. For further information see the `team community page `_. + +.. contents:: + :local: + +Testing with govcsim +==================== + +Most of the existing modules are covered by functional tests. The tests are located in the :file:`test/integration/targets/`. + +By default, the tests run against a vCenter API simulator called `govcsim `_. ``ansible-test`` will automatically pull a `govcsim container ` and use it to set-up the test environment. + +You can trigger the test of a module manually with the ``ansible-test`` command. For example, to trigger ``vcenter_folder`` tests: + +.. code-block:: shell + + source hacking/env-setup + ansible-test integration --python 3.7 vcenter_folder + +``govcsim`` is handy because it's much faster than a regular test environment. However, it does not +support all the ESXi or vCenter features. + +.. note:: + + Do not confuse ``govcsim`` with ``vcsim``. The latter is an old, outdated version of the vCenter simulator, whereas govcsim is new and written in Go. + +Testing with your own infrastructure +==================================== + +You can also target a regular VMware environment. This paragraph explains step by step how you can run the test-suite yourself. + +Requirements +------------ + +- 2 ESXi hosts (6.5 or 6.7) + - with 2 NIC, the second ones should be available for the test +- a VCSA host +- a NFS server +- Python dependencies: + - `pyvmomi ` + - `requests `.
+ +If you want to deploy your test environment in a hypervisor, both VMware or Libvirt work well. + +NFS server configuration +~~~~~~~~~~~~~~~~~~~~~~~~ + +Your NFS server must expose the following directory structure: + +.. code-block:: shell + + $ tree /srv/share/ + /srv/share/ + ├── isos + │   ├── base.iso + │   ├── centos.iso + │   └── fedora.iso + └── vms + 2 directories, 3 files + +On a Linux system, you can expose the directory over NFS with the following export file: + +.. code-block:: shell + + $ cat /etc/exports + /srv/share 192.168.122.0/255.255.255.0(rw,anonuid=1000,anongid=1000) + +.. note:: + + With this configuration all the new files will be owned by the user with the UID and GID 1000/1000. + Adjust the configuration to match your user's UID/GID. + +The service can be enabled with: + +.. code-block:: shell + + $ sudo systemctl enable --now nfs-server + + +Configure your installation +--------------------------- + +Prepare a configuration file that describes your set-up. The file +should be called :file:`test/integration/cloud-config-vcenter.ini` and based on +:file:`test/lib/ansible_test/config/cloud-config-vcenter.ini.template`. For instance, if you've deployed your lab with +`vmware-on-libvirt `: + +.. code-block:: ini + + [DEFAULT] + vcenter_username: administrator@vsphere.local + vcenter_password: !234AaAa56 + vcenter_hostname: vcenter.test + vmware_validate_certs: false + esxi1_username: root + esxi1_hostname: esxi1.test + esxi1_password: root + esxi2_username: root + esxi2_hostname: test2.test + esxi2_password: root + +If you use an HTTP proxy +------------------------- +Support for hosting test infrastructure behind an HTTP proxy is currently in development. 
See the following pull requests for more information: + +- ansible-test: vcenter behind an HTTP proxy +- pyvmomi: proxy support +- VMware: add support for HTTP proxy in connection API + +Once you have incorporated the code from those PRs, specify the location of the proxy server with the two extra keys: + +.. code-block:: ini + + vmware_proxy_host: esxi1-gw.ws.testing.ansible.com + vmware_proxy_port: 11153 + +In addition, you may need to adjust the variables of the following file to match the configuration of your lab: +:file:`test/integration/targets/prepare_vmware_tests/vars/real_lab.yml`. If you use `vmware-on-libvirt ` to prepare you lab, you don't have anything to change. + +Run the test-suite +------------------ + +Once your configuration is ready, you can trigger a run with the following command: + +.. code-block:: shell + + source hacking/env-setup + VMWARE_TEST_PLATFORM=static ansible-test integration --python 3.7 vmware_host_firewall_manager + +``vmware_host_firewall_manager`` is the name of the module to test. + +``vmware_guest`` is much larger than any other test role and is rather slow. You can enable or disable some of its test playbooks in +:file:`test/integration/targets/vmware_guest/defaults/main.yml`. + + +Unit-test +========= + +The VMware modules have limited unit-test coverage. You can run the test suite with the +following commands: + +.. code-block:: shell + + source hacking/env-setup + ansible-test units --venv --python 3.7 '.*vmware.*' + +Code style and best practice +============================ + +datacenter argument with ESXi +----------------------------- + +The ``datacenter`` parameter should not use ``ha-datacenter`` by default. This is because the user may +not realize that Ansible silently targets the wrong data center. + +esxi_hostname should not be mandatory +------------------------------------- + +Depending upon the functionality provided by ESXi or vCenter, some modules can seamlessly work with both. 
In this case, +``esxi_hostname`` parameter should be optional. + +.. code-block:: python + + if self.is_vcenter(): + esxi_hostname = module.params.get('esxi_hostname') + if not esxi_hostname: + self.module.fail_json("esxi_hostname parameter is mandatory") + self.host = self.get_all_host_objs(cluster_name=cluster_name, esxi_host_name=esxi_hostname)[0] + else: + self.host = find_obj(self.content, [vim.HostSystem], None) + if self.host is None: + self.module.fail_json(msg="Failed to find host system.") + +Example should use the fully qualified collection name (FQCN) +------------------------------------------------------------- + +Use FQCN for examples within module documentation For instance, you should use ``community.vmware.vmware_guest`` instead of just +``vmware_guest``. + +This way, the examples don't depend on the ``collections`` directive of the +playbook. + +Functional tests +---------------- + +Writing new tests +~~~~~~~~~~~~~~~~~ + +If you are writing a new collection of integration tests, there are a few VMware-specific things to note beyond +the standard Ansible :ref:`integration testing` process. + +The test-suite uses a set of common, pre-defined vars located in the :file:`test/integration/targets/prepare_vmware_tests/` role. +The resources defined there are automatically created by importing that role at the start of your test: + +.. code-block:: yaml + + - import_role: + name: prepare_vmware_tests + vars: + setup_datacenter: true + +This will give you a ready to use cluster, datacenter, datastores, folder, switch, dvswitch, ESXi hosts, and VMs. + +No need to create too much resources +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Most of the time, it's not necessary to use ``with_items`` to create multiple resources. By avoiding it, +you speed up the test execution and you simplify the clean up afterwards. 
+ +VM names should be predictable +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you need to create a new VM during your test, you can use ``test_vm1``, ``test_vm2`` or ``test_vm3``. This +way it will be automatically clean up for you. + +Avoid the common boiler plate code in your test playbook +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +From Ansible 2.10, the test suite uses `modules_defaults`. This module +allow us to preinitialize the following default keys of the VMware modules: + +- hostname +- username +- password +- validate_certs + +For example, the following block: + +.. code-block:: yaml + + - name: Add a VMware vSwitch + vmware_vswitch: + hostname: '{{ vcenter_hostname }}' + username: '{{ vcenter_username }}' + password: '{{ vcenter_password }}' + validate_certs: 'no' + esxi_hostname: 'esxi1' + switch_name: "boby" + state: present + +should be simplified to just: + +.. code-block:: yaml + + - name: Add a VMware vSwitch + vmware_vswitch: + esxi_hostname: 'esxi1' + switch_name: "boby" + state: present + + +Typographic convention +====================== + +Nomenclature +------------ + +We try to enforce the following rules in our documentation: + +- VMware, not VMWare or vmware +- ESXi, not esxi or ESXI +- vCenter, not vcenter or VCenter + +We also refer to vcsim's Go implementation with ``govcsim``. This to avoid any confusion with the outdated implementation. diff --git a/docs/docsite/rst/dev_guide/shared_snippets/licensing.txt b/docs/docsite/rst/dev_guide/shared_snippets/licensing.txt new file mode 100644 index 00000000..2802c420 --- /dev/null +++ b/docs/docsite/rst/dev_guide/shared_snippets/licensing.txt @@ -0,0 +1,9 @@ +.. note:: + **LICENSING REQUIREMENTS** Ansible enforces the following licensing requirements: + + * Utilities (files in ``lib/ansible/module_utils/``) may have one of two licenses: + * A file in ``module_utils`` used **only** for a specific vendor's hardware, provider, or service may be licensed under GPLv3+. 
+ Adding a new file under ``module_utils`` with GPLv3+ needs to be approved by the core team. + * All other ``module_utils`` must be licensed under BSD, so GPL-licensed third-party and Galaxy modules can use them. + * If there's doubt about the appropriate license for a file in ``module_utils``, the Ansible Core Team will decide during an Ansible Core Community Meeting. + * All other files shipped with Ansible, including all modules, must be licensed under the GPL license (GPLv3 or later). diff --git a/docs/docsite/rst/dev_guide/style_guide/basic_rules.rst b/docs/docsite/rst/dev_guide/style_guide/basic_rules.rst new file mode 100644 index 00000000..034aece5 --- /dev/null +++ b/docs/docsite/rst/dev_guide/style_guide/basic_rules.rst @@ -0,0 +1,69 @@ +.. _styleguide_basic: + +Basic rules +=========== +.. contents:: + :local: + +Use standard American English +----------------------------- +Ansible uses Standard American English. Watch for common words that are spelled differently in American English (color vs colour, organize vs organise, and so on). + +Write for a global audience +--------------------------- +Everything you say should be understandable by people of different backgrounds and cultures. Avoid idioms and regionalism and maintain a neutral tone that cannot be misinterpreted. Avoid attempts at humor. + +Follow naming conventions +------------------------- +Always follow naming conventions and trademarks. + +.. good place to link to an Ansible terminology page + +Use clear sentence structure +---------------------------- +Clear sentence structure means: + +- Start with the important information first. +- Avoid padding/adding extra words that make the sentence harder to understand. +- Keep it short - Longer sentences are harder to understand. 
+ +Some examples of improving sentences: + +Bad: + The unwise walking about upon the area near the cliff edge may result in a dangerous fall and therefore it is recommended that one remains a safe distance to maintain personal safety. + +Better: + Danger! Stay away from the cliff. + +Bad: + Furthermore, large volumes of water are also required for the process of extraction. + +Better: + Extraction also requires large volumes of water. + +Avoid verbosity +--------------- +Write short, succinct sentences. Avoid terms like: + +- "...as has been said before," +- "..each and every," +- "...point in time," +- "...in order to," + +Highlight menu items and commands +--------------------------------- +When documenting menus or commands, it helps to **bold** what is important. + +For menu procedures, bold the menu names, button names, and so on to help the user find them on the GUI: + +1. On the **File** menu, click **Open**. +2. Type a name in the **User Name** field. +3. In the **Open** dialog box, click **Save**. +4. On the toolbar, click the **Open File** icon. + +For code or command snippets, use the RST `code-block directive `_:: + + .. code-block:: bash + + ssh my_vyos_user@vyos.example.net + show config diff --git a/docs/docsite/rst/dev_guide/style_guide/grammar_punctuation.rst b/docs/docsite/rst/dev_guide/style_guide/grammar_punctuation.rst new file mode 100644 index 00000000..4505e2d0 --- /dev/null +++ b/docs/docsite/rst/dev_guide/style_guide/grammar_punctuation.rst @@ -0,0 +1,201 @@ + +Grammar and Punctuation +`````````````````````````````````````` + +Common Styles and Usage, and Common Mistakes +---------------------------------------------------- + +Ansible +~~~~~~~~~ +* Write "Ansible." Not "Ansible, Inc." or "AnsibleWorks The only exceptions to this rule are when we're writing legal or financial statements. + +* Never use the logotype by itself in body text. Always keep the same font you are using the rest of the sentence. + +* A company is singular in the US. 
In other words, Ansible is an "it," not a "they." + + +Capitalization +~~~~~~~~~~~~~~ +If it's not a real product, service, or department at Ansible, don't capitalize it. Not even if it seems important. Capitalize only the first letter of the first word in headlines. + +Colon +~~~~~~~~~~~~~~~~~ +A colon is generally used before a list or series: +- The Triangle Area consists of three cities: Raleigh, Durham, and Chapel Hill. + +But not if the list is a complement or object of an element in the sentence: +- Before going on vacation, be sure to (1) set the alarm, (2) cancel the newspaper, and (3) ask a neighbor to collect your mail. + +Use a colon after "as follows" and "the following" if the related list comes immediately after: +wedge The steps for changing directories are as follows: + + 1. Open a terminal. + 2. Type cd... + +Use a colon to introduce a bullet list (or dash, or icon/symbol of your choice): + + In the Properties dialog box, you'll find the following entries: + + - Connection name + - Count + - Cost per item + + +Commas +~~~~~~~~~~~ +Use serial commas, the comma before the "and" in a series of three or more items: + +- "Item 1, item 2, and item 3." + + +It's easier to read that way and helps avoid confusion. The primary exception to this you will see is in PR, where it is traditional not to use serial commas because it is often the style of journalists. + +Commas are always important, considering the vast difference in meanings of the following two statements. + +- Let's eat, Grandma +- Let's eat Grandma. + +Correct punctuation could save Grandma's life. + +If that does not convince you, maybe this will: + +.. image:: images/commas-matter.jpg + + +Contractions +~~~~~~~~~~~~~ +Do not use contractions in Ansible documents. + +Em dashes +~~~~~~~~~~ +When possible, use em-dashes with no space on either side. When full em-dashes aren't available, use double-dashes with no spaces on either side--like this. 
+ +A pair of em dashes can be used in place of commas to enhance readability. Note, however, that dashes are always more emphatic than commas. + +A pair of em dashes can replace a pair of parentheses. Dashes are considered less formal than parentheses; they are also more intrusive. If you want to draw attention to the parenthetical content, use dashes. If you want to include the parenthetical content more subtly, use parentheses. + +.. note:: + When dashes are used in place of parentheses, surrounding punctuation should be omitted. Compare the following examples. + +:: + + Upon discovering the errors (all 124 of them), the publisher immediately recalled the books. + + Upon discovering the errors—all 124 of them—the publisher immediately recalled the books. + + +When used in place of parentheses at the end of a sentence, only a single dash is used. + +:: + + After three weeks on set, the cast was fed up with his direction (or, rather, lack of direction). + + After three weeks on set, the cast was fed up with his direction—or, rather, lack of direction. + + +Exclamation points (!) +~~~~~~~~~~~~~~~~~~~~~~~ +Do not use them at the end of sentences. An exclamation point can be used when referring to a command, such as the bang (!) command. + +Gender References +~~~~~~~~~~~~~~~~~~ +Do not use gender-specific pronouns in documentation. It is far less awkward to read a sentence that uses "they" and "their" rather than "he/she" and "his/hers." + +It is fine to use "you" when giving instructions and "the user," "new users," and so on. in more general explanations. + +Never use "one" in place of "you" when writing technical documentation. Using "one" is far too formal. + +Never use "we" when writing. "We" aren't doing anything on the user side. Ansible's products are doing the work as requested by the user. + + +Hyphen +~~~~~~~~~~~~~~ +The hyphen's primary function is the formation of certain compound terms. Do not use a hyphen unless it serves a purpose. 
If a compound adjective cannot be misread or, as with many psychological terms, its meaning is established, a hyphen is not necessary. + +Use hyphens to avoid ambiguity or confusion: + +:: + + a little-used car + a little used-car + + cross complaint + cross-complaint + + high-school girl + high schoolgirl + + fine-tooth comb (most people do not comb their teeth) + + third-world war + third world war + +.. image:: images/hyphen-funny.jpg + +In professionally printed material (particularly books, magazines, and newspapers), the hyphen is used to divide words between the end of one line and the beginning of the next. This allows for an evenly aligned right margin without highly variable (and distracting) word spacing. + + +Lists +~~~~~~~ +Keep the structure of bulleted lists equivalent and consistent. If one bullet is a verb phrase, they should all be verb phrases. If one is a complete sentence, they should all be complete sentences, and so on. + +Capitalize the first word of each bullet. Unless it is obvious that it is just a list of items, such as a list of items like: +* computer +* monitor +* keyboard +* mouse + +When the bulleted list appears within the context of other copy, (unless it's a straight list like the previous example) add periods, even if the bullets are sentence fragments. Part of the reason behind this is that each bullet is said to complete the original sentence. + +In some cases where the bullets are appearing independently, such as in a poster or a homepage promotion, they do not need periods. + +When giving instructional steps, use numbered lists instead of bulleted lists. + + +Months and States +~~~~~~~~~~~~~~~~~~~~ +Abbreviate months and states according to AP. Months are only abbreviated if they are used in conjunction with a day. Example: "The President visited in January 1999." or "The President visited Jan. 12." + +Months: Jan., Feb., March, April, May, June, July, Aug., Sept., Nov., Dec. 
+ +States: Ala., Ariz., Ark., Calif., Colo., Conn., Del., Fla., Ga., Ill., Ind., Kan., Ky., La., Md., Mass., Mich., Minn., Miss., Mo., Mont., Neb., Nev., NH, NJ, NM, NY, NC, ND, Okla., Ore., Pa., RI, SC, SD, Tenn., Vt., Va., Wash., W.Va., Wis., Wyo. + +Numbers +~~~~~~~~~ +Numbers between one and nine are written out. 10 and above are numerals. The exception to this is writing "4 million" or "4 GB." It's also acceptable to use numerals in tables and charts. + +Phone Numbers ++++++++++++++++ + +Phone number style: 1 (919) 555-0123 x002 and 1 888-GOTTEXT + + +Quotations (Using Quotation Marks and Writing Quotes) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + "Place the punctuation inside the quotes," the editor said. + +Except in rare instances, use only "said" or "says" because anything else just gets in the way of the quote itself, and also tends to editorialize. + +Place the name first right after the quote: + "I like to write first-person because I like to become the character I'm writing," Wally Lamb said. + +Not: + "I like to write first-person because I like to become the character I'm writing," said Wally Lamb. + + +Semicolon +~~~~~~~~~~~~~~~ +Use a semicolon to separate items in a series if the items contain commas: + +- Everyday I have coffee, toast, and fruit for breakfast; a salad for lunch; and a peanut butter sandwich, cookies, ice cream, and chocolate cake for dinner. + +Use a semicolon before a conjunctive adverb (however, therefore, otherwise, namely, for example, and so on): +- I think; therefore, I am. + +Spacing after sentences +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Use only a single space after a sentence. + +Time +~~~~~~~~ +* Time of day is written as "4 p.m." 
diff --git a/docs/docsite/rst/dev_guide/style_guide/images/commas-matter-2.jpg b/docs/docsite/rst/dev_guide/style_guide/images/commas-matter-2.jpg new file mode 100644 index 00000000..2dec81c4 Binary files /dev/null and b/docs/docsite/rst/dev_guide/style_guide/images/commas-matter-2.jpg differ diff --git a/docs/docsite/rst/dev_guide/style_guide/images/commas-matter.jpg b/docs/docsite/rst/dev_guide/style_guide/images/commas-matter.jpg new file mode 100644 index 00000000..1699a31a Binary files /dev/null and b/docs/docsite/rst/dev_guide/style_guide/images/commas-matter.jpg differ diff --git a/docs/docsite/rst/dev_guide/style_guide/images/hyphen-funny.jpg b/docs/docsite/rst/dev_guide/style_guide/images/hyphen-funny.jpg new file mode 100644 index 00000000..d642703f Binary files /dev/null and b/docs/docsite/rst/dev_guide/style_guide/images/hyphen-funny.jpg differ diff --git a/docs/docsite/rst/dev_guide/style_guide/images/thenvsthan.jpg b/docs/docsite/rst/dev_guide/style_guide/images/thenvsthan.jpg new file mode 100644 index 00000000..f4851b07 Binary files /dev/null and b/docs/docsite/rst/dev_guide/style_guide/images/thenvsthan.jpg differ diff --git a/docs/docsite/rst/dev_guide/style_guide/index.rst b/docs/docsite/rst/dev_guide/style_guide/index.rst new file mode 100644 index 00000000..a50a3180 --- /dev/null +++ b/docs/docsite/rst/dev_guide/style_guide/index.rst @@ -0,0 +1,244 @@ +.. _style_guide: + +******************* +Ansible style guide +******************* + +Welcome to the Ansible style guide! +To create clear, concise, consistent, useful materials on docs.ansible.com, follow these guidelines: + +.. contents:: + :local: + +Linguistic guidelines +===================== + +We want the Ansible documentation to be: + +* clear +* direct +* conversational +* easy to translate + +We want reading the docs to feel like having an experienced, friendly colleague +explain how Ansible works. 
+ +Stylistic cheat-sheet +--------------------- + +This cheat-sheet illustrates a few rules that help achieve the "Ansible tone": + ++-------------------------------+------------------------------+----------------------------------------+ +| Rule | Good example | Bad example | ++===============================+==============================+========================================+ +| Use active voice | You can run a task by | A task can be run by | ++-------------------------------+------------------------------+----------------------------------------+ +| Use the present tense | This command creates a | This command will create a | ++-------------------------------+------------------------------+----------------------------------------+ +| Address the reader | As you expand your inventory | When the number of managed nodes grows | ++-------------------------------+------------------------------+----------------------------------------+ +| Use standard English | Return to this page | Hop back to this page | ++-------------------------------+------------------------------+----------------------------------------+ +| Use American English | The color of the output | The colour of the output | ++-------------------------------+------------------------------+----------------------------------------+ + +Header case +----------- + +Headers should be written in sentence case. For example, this section's title is +``Header case``, not ``Header Case`` or ``HEADER CASE``. + + +Avoid using Latin phrases +------------------------- + +Latin words and phrases like ``e.g.`` or ``etc.`` +are easily understood by English speakers. +They may be harder to understand for others and are also tricky for automated translation. 
+ +Use the following English terms in place of Latin terms or abbreviations: + ++-------------------------------+------------------------------+ +| Latin | English | ++===============================+==============================+ +| i.e | in other words | ++-------------------------------+------------------------------+ +| e.g. | for example | ++-------------------------------+------------------------------+ +| etc | and so on | ++-------------------------------+------------------------------+ +| via | by/ through | ++-------------------------------+------------------------------+ +| vs./versus | rather than/against | ++-------------------------------+------------------------------+ + + +reStructuredText guidelines +=========================== + +The Ansible documentation is written in reStructuredText and processed by Sphinx. +We follow these technical or mechanical guidelines on all rST pages: + +Header notation +--------------- + +`Section headers in reStructuredText `_ +can use a variety of notations. +Sphinx will 'learn on the fly' when creating a hierarchy of headers. +To make our documents easy to read and to edit, we follow a standard set of header notations. +We use: + +* ``###`` with overline, for parts: + +.. code-block:: rst + + ############### + Developer guide + ############### + +* ``***`` with overline, for chapters: + +.. code-block:: rst + + ******************* + Ansible style guide + ******************* + +* ``===`` for sections: + +.. code-block:: rst + + Mechanical guidelines + ===================== + +* ``---`` for subsections: + +.. code-block:: rst + + Internal navigation + ------------------- + +* ``^^^`` for sub-subsections: + +.. code-block:: rst + + Adding anchors + ^^^^^^^^^^^^^^ + +* ``"""`` for paragraphs: + +.. code-block:: rst + + Paragraph that needs a title + """""""""""""""""""""""""""" + + +Internal navigation +------------------- + +`Anchors (also called labels) and links `_ +work together to help users find related content. 
+Local tables of contents also help users navigate quickly to the information they need. +All internal links should use the ``:ref:`` syntax. +Every page should have at least one anchor to support internal ``:ref:`` links. +Long pages, or pages with multiple levels of headers, can also include a local TOC. + +.. _adding_anchors_rst: + +Adding anchors +^^^^^^^^^^^^^^ + +* Include at least one anchor on every page +* Place the main anchor above the main header +* If the file has a unique title, use that for the main page anchor:: + + .. _unique_page:: + +* You may also add anchors elsewhere on the page + +Adding internal links +^^^^^^^^^^^^^^^^^^^^^ + +* All internal links must use ``:ref:`` syntax. These links both point to the anchor defined above: + +.. code-block:: rst + + :ref:`unique_page` + :ref:`this page ` + +The second example adds custom text for the link. + +Adding links to modules and plugins +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Ansible 2.10 and later require the extended Fully Qualified Collection Name (FQCN) as part of the links: + +.. code-block:: text + + ansible_collections. + FQCN + _module + +For example: + + .. code-block:: rst + + :ref:`ansible.builtin.first_found lookup plugin ` + +displays as :ref:`ansible.builtin.first_found lookup plugin `. + +Modules require different suffixes from other plugins: + +* Module links use this extended FQCN module name with ``_module`` for the anchor. +* Plugin links use this extended FQCN plugin name with the plugin type (``_connection`` for example). + +.. code-block:: rst + + :ref:`arista.eos.eos_config ` + :ref:`community.kubernetes.kubectl connection plugin ` + +.. note:: + + ``ansible.builtin`` is the FQCN for modules included in ``ansible.base``. Documentation links are the only place you prepend ``ansible_collections`` to the FQCN. This is used by the documentation build scripts to correctly fetch documentation from collections on Ansible Galaxy. + +.. 
_local_toc: + +Adding local TOCs +^^^^^^^^^^^^^^^^^ + +The page you're reading includes a `local TOC `_. +If you include a local TOC: + +* place it below, not above, the main heading and (optionally) introductory text +* use the ``:local:`` directive so the page's main header is not included +* do not include a title + +The syntax is: + +.. code-block:: rst + + .. contents:: + :local: + +More resources +============== + +These pages offer more help with grammatical, stylistic, and technical rules for documentation. + +.. toctree:: + :maxdepth: 1 + + basic_rules + voice_style + trademarks + grammar_punctuation + spelling_word_choice + search_hints + resources + +.. seealso:: + + :ref:`community_documentation_contributions` + How to contribute to the Ansible documentation + :ref:`testing_documentation_locally` + How to build the Ansible documentation + `irc.freenode.net `_ + #ansible-docs IRC chat channel diff --git a/docs/docsite/rst/dev_guide/style_guide/resources.rst b/docs/docsite/rst/dev_guide/style_guide/resources.rst new file mode 100644 index 00000000..c624b12e --- /dev/null +++ b/docs/docsite/rst/dev_guide/style_guide/resources.rst @@ -0,0 +1,10 @@ +Resources +```````````````` +* Follow the style of the :ref:`Ansible Documentation` +* Ask for advice on IRC, on the ``#ansible-devel`` Freenode channel +* Review these online style guides: + + * `AP Stylebook `_ + * `Chicago Manual of Style `_ + * `Strunk and White's Elements of Style `_ + diff --git a/docs/docsite/rst/dev_guide/style_guide/search_hints.rst b/docs/docsite/rst/dev_guide/style_guide/search_hints.rst new file mode 100644 index 00000000..d9bf3f66 --- /dev/null +++ b/docs/docsite/rst/dev_guide/style_guide/search_hints.rst @@ -0,0 +1,48 @@ + +.. _search_hints: + +Writing documentation so search can find it +------------------------------------------- + +One of the keys to writing good documentation is to make it findable. 
Readers use a combination of internal site search and external search engines such as Google or duckduckgo. + +To ensure Ansible documentation is findable, you should: + +#. Use headings that clearly reflect what you are documenting. +#. Use numbered lists for procedures or high-level steps where possible. +#. Avoid linking to github blobs where possible. + + +Using clear headings in documentation +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +We all use simple English when we want to find something. For example, the title of this page could have been any one of the following: + +* Search optimization +* Findable documentation +* Writing for findability + +What we are really trying to describe is - how do I write documentation so search engines can find my content? That simple phrase is what drove the title of this section. When you are creating your headings for documentation, spend some time to think about what you would type in a search box to find it, or more importantly, how someone less familiar with Ansible would try to find that information. Your heading should be the answer to that question. + +One word of caution - you do want to limit the size of your headings. A full heading such as `How do I write documentation so search engines can find my content?` is too long. Search engines would truncate anything over 50 - 60 characters. Long headings would also wrap on smaller devices such as a smart phone. + +Using numbered lists for `zero position` snippets +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Google can optimize the search results by adding a `feature snippet `_ at the top of the search results. This snippet provides a small window into the documentation on that first search result that adds more detail than the rest of the search results, and can occasionally answer the reader's questions right there, or at least verify that the linked page is what the reader is looking for. 
+ +Google returns the feature snippet in the form of numbered steps. Where possible, you should add a numbered list near the top of your documentation page, where appropriate. The steps can be the exact procedure a reader would follow, or could be a high level introduction to the documentation topic, such as the numbered list at the top of this page. + +Problems with github blobs on search results +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Search engines do not typically return github blobs in search results, at least not in higher ranked positions. While it is possible and sometimes necessary to link to github blobs from documentation, the better approach would be to copy that information into an .rst page in Ansible documentation. + +Other search hints +^^^^^^^^^^^^^^^^^^ + +While it may not be possible to adapt your documentation to all search optimizations, keep the following in mind as you write your documentation: + +* **Search engines don't parse beyond the `#` in an html page.** So for example, all the subheadings on this page are appended to the main page URL. As such, when I search for 'Using number lists for zero position snippets', the search result would be a link to the top of this page, not a link directly to the subheading I searched for. Using :ref:`local TOCs ` helps alleviate this problem as the reader can scan for the header at top of the page and click to the section they are looking for. For critical documentation, consider creating a new page that can be a direct search result page. + +* **Make your first few sentences clearly describe your page topic.** Search engines return not just the URL, but a short description of the information at the URL. For Ansible documentation, we do not have description metadata embedded on each page. Instead, the search engines return the first couple of sentences (140 characters) on the page. That makes your first sentence or two very important to the reader who is searching for something in Ansible. 
diff --git a/docs/docsite/rst/dev_guide/style_guide/spelling_word_choice.rst b/docs/docsite/rst/dev_guide/style_guide/spelling_word_choice.rst new file mode 100644 index 00000000..3f6d8d7b --- /dev/null +++ b/docs/docsite/rst/dev_guide/style_guide/spelling_word_choice.rst @@ -0,0 +1,327 @@ +Spelling - Word Usage - Common Words and Phrases to Use and Avoid +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Acronyms +++++++++++++++++ + +Always uppercase. An acronym is a word formed from the initial letters of a name, such as ROM for Read-only memory, +SaaS for Software as a Service, or by combining initial letters or part of a series of words, such as LILO for LInux +LOader. + +Spell out the acronym before using it in alone text, such as "The Embedded DevKit (EDK)..." + +Applications ++++++++++++++++++++ +When used as a proper name, use the capitalization of the product, such as GNUPro, Source-Navigator, and Ansible Tower. When used as a command, use lowercase as appropriate, such as "To start GCC, type ``gcc``." + +.. note:: + + "vi" is always lowercase. + +As +++++++++ +This is often used to mean "because", but has other connotations, for example, parallel or simultaneous actions. If you mean "because", say "because". + +Asks for +++++++++++++++++ +Use "requests" instead. + +Assure/Ensure/Insure +++++++++++++++++++++++++++++ +Assure implies a sort of mental comfort. As in "I assured my husband that I would eventually bring home beer." + +Ensure means "to make sure." + +Insure relates to monetary insurance. + + +Back up +++++++++++++++ +This is a verb. You "back up" files; you do not "backup" files. + +Backup +++++++++++ +This is a noun. You create "backup" files; you do not create "back up" files. + +Backward +++++++++++++++ +Correct. Avoid using backwards unless you are stating that something has "backwards compatibility." + +Backwards compatibility +++++++++++++++++++++++++ +Correct as is. + +By way of +++++++++++++++++++ +Use "using" instead. 
+ +Can/May +++++++++++++++ +Use "can" to describe actions or conditions that are possible. Use "may" only to describe situations where permission is being given. If either "can," "could," or "may" apply, use "can" because it's less tentative. + +CD or cd ++++++++++++++++ +When referring to a compact disk, use CD, such as "Insert the CD into the CD-ROM drive." When referring to the change directory command, use cd. + +CD-ROM ++++++++++++++ +Correct. Do not use "cdrom," "CD-Rom," "CDROM," "cd-rom" or any other variation. When referring to the drive, use CD-ROM drive, such as "Insert the CD into the CD-ROM drive." The plural is "CD-ROMs." + + +Command line ++++++++++++++++++++ +Correct. Do not use "command-line" or "commandline" as a noun. If used as an adjective, "command-line" is appropriate, for example "command-line arguments". + +Use "command line" to describe where to place options for a command, but not where to type the command. Use "shell prompt" instead to describe where to type commands. The line on the display screen where a command is expected. Generally, the command line is the line that contains the most recently displayed command prompt. + + +Daylight saving time (DST) ++++++++++++++++++++++++++++++++ + +Correct. Do not use daylight savings time. Daylight Saving Time (DST) is often misspelled "Daylight Savings", with an "s" at the end. Other common variations are "Summer Time" and "Daylight-Saving Time". (https://www.timeanddate.com/time/dst/daylight-savings-time.html) + + +Download +++++++++++++++++ +Correct. Do not use "down load" or "down-load." + +e.g. +++++++++++ +Spell it out: "For example." + +Failover ++++++++++++++++ +When used as a noun, a failover is a backup operation that automatically switches to a standby database, server or network if the primary system fails or is temporarily shut down for servicing. Failover is an important fault tolerance function of mission-critical systems that rely on constant accessibility. 
Failover automatically and transparently to the user redirects requests from the failed or down system to the backup system that mimics the operations of the primary system. + +Fail over +++++++++++++ +When used as a verb, fail over is two words since there can be different tenses such as failed over. + +Fewer ++++++++++++++++++++ +Fewer is used with plural nouns. Think things you could count. Time, money, distance, and weight are often listed as exceptions to the traditional "can you count it" rule, often thought of as singular amounts (the work will take less than 5 hours, for example). + +File name ++++++++++++++ +Correct. Do not use "filename." + +File system ++++++++++++++++++++ +Correct. Do not use "filesystem." The system that an operating system or program uses to organize and keep track of files. For example, a hierarchical file system is one that uses directories to organize files into a tree structure. Although the operating system provides its own file management system, you can buy separate file management systems. These systems interact smoothly with the operating system but provide more features, such as improved backup procedures and stricter file protection. + +For instance +++++++++++++++ +Use "For example," instead. + +For further/additional/whatever information +++++++++++++++++++++++++++++++++++++++++++++++ +Use "For more information" + +For this reason ++++++++++++++++++ +Use "therefore". + +Forward +++++++++++++++ +Correct. Avoid using "forwards." + +Gigabyte (GB) +++++++++++++++ +2 to the 30th power (1,073,741,824) bytes. One gigabyte is equal to 1,024 megabytes. Gigabyte is often abbreviated as G or GB. + +Got +++++++++++++++ +Avoid. Use "must" instead. + +High-availability ++++++++++++++++++ +Correct. Do not use "high availability." + +Highly available ++++++++++++++++++ +Correct. Do not use "highly-available." + +Hostname ++++++++++++++++++ +Correct. Do not use host name. + +i.e. +++++++++++++++ +Spell it out: "That is." 
+ +Installer +++++++++++++++ +Avoid. Use "installation program" instead. + +It's and its +++++++++++++++ +"It's" is a contraction for "it is;" use "it is" instead of "it's." Use "its" as a possessive pronoun (for example, "the store is known for its low prices"). + +Less +++++++++++++ +Less is used with singular nouns. For example "View less details" wouldn't be correct but "View less detail" works. Use fewer when you have plural nouns (things you can count). + +Linux +++++++++++++++ +Correct. Do not use "LINUX" or "linux" unless referring to a command, such as "To start Linux, type linux." Linux is a registered trademark of Linus Torvalds. + +Login +++++++++++++++ +A noun used to refer to the login prompt, such as "At the login prompt, enter your username." + +Log in +++++++++++++++ +A verb used to refer to the act of logging in. Do not use "login," "loggin," "logon," and other variants. For example, "When starting your computer, you are requested to log in..." + +Log on +++++++++++++++ +To make a computer system or network recognize you so that you can begin a computer session. Most personal computers have no log-on procedure -- you just turn the machine on and begin working. For larger systems and networks, however, you usually need to enter a username and password before the computer system will allow you to execute programs. + +Lots of +++++++++++++++ +Use "Several" or something equivalent instead. + +Make sure +++++++++++++++ +This means "be careful to remember, attend to, or find out something." For example, "...make sure that the rhedk group is listed in the output." +Try to use verify or ensure instead. + +Manual/man page +++++++++++++++++++ +Correct. Two words. Do not use "manpage" + +MB +++++++++ +(1) When spelled MB, short for megabyte (1,000,000 or 1,048,576 bytes, depending on the context). +(2) When spelled Mb, short for megabit. + +MBps +++++++++++++++ +Short for megabytes per second, a measure of data transfer speed. 
Mass storage devices are generally measured in MBps. + +MySQL +++++++++++++++ +Common open source database server and client package. Do not use "MYSQL" or "mySQL." + +Need to +++++++++++++++ +Avoid. Use "must" instead. + +Read-only +++++++++++++ +Correct. Use when referring to the access permissions of files or directories. + +Real time/real-time +++++++++++++++++++++++ +Depends. If used as a noun, it is the actual time during which something takes place. For example, "The computer may partly analyze the data in real time (as it comes in) -- R. H. March." If used as an adjective, "real-time" is appropriate. For example, "XEmacs is a self-documenting, customizable, extensible, real-time display editor." + +Refer to +++++++++++++++ +Use to indicate a reference (within a manual or website) or a cross-reference (to another manual or documentation source). + +See +++++++++++++++ +Don't use. Use "Refer to" instead. + +Since +++++++++ +This is often used to mean "because", but "since" has connotations of time, so be careful. If you mean "because", say "because". + +Tells +++++++++++++++ +Use "Instructs" instead. + +That/which +++++++++++++++ +"That" introduces a restrictive clause-a clause that must be there for the sentence to make sense. A restrictive clause often defines the noun or phrase preceding it. "Which" introduces a non-restrictive, parenthetical clause-a clause that could be omitted without affecting the meaning of the sentence. For example: The car was travelling at a speed that would endanger lives. The car, which was traveling at a speed that would endanger lives, swerved onto the sidewalk. Use "who" or "whom," rather than "that" or "which," when referring to a person. + +Then/than +++++++++++++++ + "Then" refers to a time in the past or the next step in a sequence. "Than" is used for comparisons. + +.. image:: images/thenvsthan.jpg + +Third-party +++++++++++++++ +Correct. Do not use "third party". + +Troubleshoot +++++++++++++++ +Correct. 
Do not use "trouble shoot" or "trouble-shoot." To isolate the source of a problem and fix it. In the case of computer systems, the term troubleshoot is usually used when the problem is suspected to be hardware -related. If the problem is known to be in software, the term debug is more commonly used. + +UK +++++++++++++++ +Correct as is, no periods. + +UNIX® +++++++++++++++ +Correct. Do not use "Unix" or "unix." UNIX® is a registered trademark of The Open Group. + +Unset +++++++++++++++ +Don't use. Use Clear. + +US +++++++++++++++ +Correct as is, no periods. + +User +++++++++++++++ +When referring to the reader, use "you" instead of "user." For example, "The user must..." is incorrect. Use "You must..." instead. If referring to more than one user, calling the collection "users" is acceptable, such as "Other users may wish to access your database." + +Username +++++++++++++++ +Correct. Do not use "user name." + +View +++++++++++++++ +When using as a reference ("View the documentation available online."), do not use View. Use "Refer to" instead. + +Within +++++++++++++++ +Don't use to refer to a file that exists in a directory. Use "In". + +World Wide Web +++++++++++++++ +Correct. Capitalize each word. Abbreviate as "WWW" or "Web." + +Webpage +++++++++++++++ +Correct. Do not use "web page" or "Web page." + +Web server +++++++++++++++ +Correct. Do not use "webserver". For example, "The Apache HTTP Server is the default Web server..." + +Website +++++++++++++++ +Correct. Do not use "web site" or "Web site." For example, "The Ansible website contains ..." + +Who/whom +++++++++++++++ +Use the pronoun "who" as a subject. Use the pronoun "whom" as a direct object, an indirect object, or the object of a preposition. For example: Who owns this? To whom does this belong? + +Will +++++++++++++++ +Do not use future tense unless it is absolutely necessary. For instance, do not use the sentence, "The next section will describe the process in more detail." 
Instead, use the sentence, "The next section describes the process in more detail." + +Wish +++++++++++++++ +Use "need" instead of "desire" and "wish." Use "want" when the reader's actions are optional (that is, they may not "need" something but may still "want" something). + +x86 +++++++++++++++ +Correct. Do not capitalize the "x." + +x86_64 +++++++++++++++ +Do not use. Do not use "Hammer". Always use "AMD64 and Intel® EM64T" when referring to this architecture. + +You +++++++++++++++ +Correct. Do not use "I," "he," or "she." + +You may +++++++++++++++ +Try to avoid using this. For example, "you may" can be eliminated from this sentence "You may double-click on the desktop..." + diff --git a/docs/docsite/rst/dev_guide/style_guide/trademarks.rst b/docs/docsite/rst/dev_guide/style_guide/trademarks.rst new file mode 100644 index 00000000..266f16bd --- /dev/null +++ b/docs/docsite/rst/dev_guide/style_guide/trademarks.rst @@ -0,0 +1,96 @@ + +Trademark Usage +`````````````````````````````````````` +Why is it important to use the TM, SM, and ® for our registered marks? + +Before a trademark is registered with the United States Patent and Trademark Office it is appropriate to use the TM or SM symbol depending on whether the product is for goods or services. It is important to use the TM or SM as it is notification to the public that Ansible claims rights to the mark even though it has not yet been registered. + +Once the trademark is registered, it is appropriate to use the symbol in place of the TM or SM. The symbol designation must be used in conjunction with the trademark if Ansible is to fully protect its rights. If we don't protect these marks, we run the risk of losing them in the way of Aspirin or Trampoline or Escalator. + +General Rules: ++++++++++++++++ + +Trademarks should be used on 1st references on a page or within a section. + +Use Red Hat® Ansible Tower® or Ansible®, on first reference when referring to products. 
+ +Use "Ansible" alone as the company name, as in "Ansible announced quarterly results," which is not marked. + +Also add the trademark disclaimer. +* When using Ansible trademarks in the body of written text, you should use the following credit line in a prominent place, usually a footnote. + + For Registered Trademarks: + - [Name of Trademark] is a registered trademark of Red Hat, Inc. in the United States and other countries. + + For Unregistered Trademarks (TMs/SMs): + - [Name of Trademark] is a trademark of Red Hat, Inc. in the United States and other countries. + + For registered and unregistered trademarks: + - [Name of Trademark] is a registered trademark and [Name of Trademark] is a trademark of Red Hat, Inc. in the United States and other countries. + +Guidelines for the proper use of trademarks: ++++++++++++++++++++++++++++++++++++++++++++++ + + Always distinguish trademarks from surrounding text with at least initial capital letters or in all capital letters. + +Always use proper trademark form and spelling. + +Never use a trademark as a noun. Always use a trademark as an adjective modifying the noun. + + Correct: + Red Hat® Ansible Tower® system performance is incredible. + + Incorrect: + Ansible's performance is incredible. + +Never use a trademark as a verb. Trademarks are products or services, never actions. + + Correct: + "Orchestrate your entire network using Red Hat® Ansible Tower®." + + Incorrect: + "Ansible your entire network." + +Never modify a trademark to a plural form. Instead, change the generic word from the singular to the plural. + + Correct: + "Corporate demand for Red Hat® Ansible Tower® configuration software is surging." + + Incorrect: + "Corporate demand for Ansible is surging." + +Never modify a trademark from its possessive form, or make a trademark possessive. Always use it in the form it has been registered. + +Never translate a trademark into another language. + +Never use trademarks to coin new words or names. 
+ +Never use trademarks to create a play on words. + +Never alter a trademark in any way including through unapproved fonts or visual identifiers. + +Never abbreviate or use any Ansible trademarks as an acronym. + +The importance of Ansible trademarks +++++++++++++++++++++++++++++++++++++++++++++++++ + +The Ansible trademark and the "A" logo in a shaded circle are our most valuable assets. The value of these trademarks encompass the Ansible Brand. Effective trademark use is more than just a name, it defines the level of quality the customer will receive and it ties a product or service to a corporate image. A trademark may serve as the basis for many of our everyday decisions and choices. The Ansible Brand is about how we treat customers and each other. In order to continue to build a stronger more valuable Brand we must use it in a clear and consistent manner. + +The mark consists of the letter "A" in a shaded circle. As of 5/11/15, this was a pending trademark (registration in process). + +Common Ansible Trademarks ++++++++++++++++++++++++++++++++++++++++ +* Ansible® +* Ansible Tower® + +Other Common Trademarks and Resource Sites: +++++++++++++++++++++++++++++++++++++++++++++++++ +- Linux is a registered trademark of Linus Torvalds. +- UNIX® is a registered trademark of The Open Group. +- Microsoft, Windows, Vista, XP, and NT are registered trademarks or trademarks of Microsoft Corporation in the United States and/or other countries. https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/en-us.aspx +- Apple, Mac, Mac OS, Macintosh, Pages and TrueType are either registered trademarks or trademarks of Apple Computer, Inc. in the United States and/or other countries. 
https://www.apple.com/legal/intellectual-property/trademark/appletmlist.html +- Adobe, Acrobat, GoLive, InDesign, Illustrator, PostScript, PhotoShop and the OpenType logo are either registered trademarks or trademarks of Adobe Systems Incorporated in the United States and/or other countries. https://www.adobe.com/legal/permissions/trademarks.html +- Macromedia and Macromedia Flash are trademarks of Macromedia, Inc. https://www.adobe.com/legal/permissions/trademarks.html +- IBM is a registered trademark of International Business Machines Corporation. https://www.ibm.com/legal/us/en/copytrade.shtml +- Celeron, Celeron Inside, Centrino, Centrino logo, Core Inside, Intel Core, Intel Inside, Intel Inside logo, Itanium, Itanium Inside, Pentium, Pentium Inside, VTune, Xeon, and Xeon Inside are trademarks or registered trademarks of Intel Corporation or its subsidiaries in the United States and other countries. https://www.intel.com/content/www/us/en/legal/trademarks.html + diff --git a/docs/docsite/rst/dev_guide/style_guide/voice_style.rst b/docs/docsite/rst/dev_guide/style_guide/voice_style.rst new file mode 100644 index 00000000..0dff7a87 --- /dev/null +++ b/docs/docsite/rst/dev_guide/style_guide/voice_style.rst @@ -0,0 +1,20 @@ + +Voice Style +````````````````````` +The essence of the Ansible writing style is short sentences that flow naturally together. Mix up sentence structures. Vary sentence subjects. Address the reader directly. Ask a question. And when the reader adjusts to the pace of shorter sentences, write a longer one. + +- Write how real people speak... +- ...but try to avoid slang and colloquialisms that might not translate well into other languages. +- Say big things with small words. +- Be direct. Tell the reader exactly what you want them to do. +- Be honest. +- Short sentences show confidence. +- Grammar rules are meant to be bent, but only if the reader knows you are doing this. 
+- Choose words with fewer syllables for faster reading and better understanding. +- Think of copy as one-on-one conversations rather than as a speech. It's more difficult to ignore someone who is speaking to you directly. +- When possible, start task-oriented sentences (those that direct a user to do something) with action words. For example: Find software... Contact support... Install the media.... and so forth. + +Active Voice +------------------ +Use the active voice ("Start Linuxconf by typing...") rather than passive ("Linuxconf can be started by typing...") whenever possible. Active voice makes for more lively, interesting reading. +Also avoid future tense (or using the term "will") whenever possible. For example, future tense ("The screen will display...") does not read as well as an active voice ("The screen displays"). Remember, the users you are writing for most often refer to the documentation while they are using the system, not after or in advance of using the system. diff --git a/docs/docsite/rst/dev_guide/style_guide/why_use.rst b/docs/docsite/rst/dev_guide/style_guide/why_use.rst new file mode 100644 index 00000000..0c1bf51a --- /dev/null +++ b/docs/docsite/rst/dev_guide/style_guide/why_use.rst @@ -0,0 +1,23 @@ +:orphan: + +Why Use a Style Guide? +````````````````````````````````` + +Style guides are important because they ensure consistency in the content, look, and feel of a book or a website. + +Remember, a style guide is only useful if it is used, updated, and enforced. Style Guides are useful for engineering-related documentation, sales and marketing materials, support docs, community contributions, and more. + +As changes are made to the overall Ansible site design, be sure to update this style guide with those changes. Or, should other resources listed below have major revisions, consider including company information here for ease of reference. 
+ +This style guide incorporates current Ansible resources and information so that overall site and documentation consistency can be met. + +.. raw:: html + +
+ + "If you don't find it in the index, look very carefully through the entire catalogue." + ― Sears, Roebuck and Co., 1897 Sears Roebuck & Co. Catalogue + +.. raw:: html + +
diff --git a/docs/docsite/rst/dev_guide/testing.rst b/docs/docsite/rst/dev_guide/testing.rst new file mode 100644 index 00000000..763f1672 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing.rst @@ -0,0 +1,243 @@ +.. _developing_testing: + +*************** +Testing Ansible +*************** + +.. contents:: + :local: + + +Why test your Ansible contributions? +==================================== + +If you're a developer, one of the most valuable things you can do is to look at GitHub issues and help fix bugs, since bug-fixing is almost always prioritized over feature development. Even for non-developers, helping to test pull requests for bug fixes and features is still immensely valuable. + +Ansible users who understand how to write playbooks and roles should be able to test their work. GitHub pull requests will automatically run a variety of tests (for example, Shippable) that show bugs in action. However, contributors must also test their work outside of the automated GitHub checks and show evidence of these tests in the PR to ensure that their work will be more likely to be reviewed and merged. + +Read on to learn how Ansible is tested, how to test your contributions locally, and how to extend testing capabilities. + +If you want to learn about testing collections, read :ref:`testing_collections` + + + +Types of tests +============== + +At a high level we have the following classifications of tests: + +:compile: + * :ref:`testing_compile` + * Test python code against a variety of Python versions. +:sanity: + * :ref:`testing_sanity` + * Sanity tests are made up of scripts and tools used to perform static code analysis. + * The primary purpose of these tests is to enforce Ansible coding standards and requirements. +:integration: + * :ref:`testing_integration` + * Functional tests of modules and Ansible core functionality. +:units: + * :ref:`testing_units` + * Tests directly against individual parts of the code base. 
+ + +If you're a developer, one of the most valuable things you can do is look at the GitHub +issues list and help fix bugs. We almost always prioritize bug fixing over feature +development. + +Even for non developers, helping to test pull requests for bug fixes and features is still +immensely valuable. Ansible users who understand writing playbooks and roles should be +able to add integration tests and so GitHub pull requests with integration tests that show +bugs in action will also be a great way to help. + + +Testing within GitHub & Shippable +================================= + + +Organization +------------ + +When Pull Requests (PRs) are created they are tested using Shippable, a Continuous Integration (CI) tool. Results are shown at the end of every PR. + +When Shippable detects an error and it can be linked back to a file that has been modified in the PR then the relevant lines will be added as a GitHub comment. For example:: + + The test `ansible-test sanity --test pep8` failed with the following errors: + + lib/ansible/modules/network/foo/bar.py:509:17: E265 block comment should start with '# ' + + The test `ansible-test sanity --test validate-modules` failed with the following error: + lib/ansible/modules/network/foo/bar.py:0:0: E307 version_added should be 2.4. Currently 2.3 + +From the above example we can see that ``--test pep8`` and ``--test validate-modules`` have identified an issue. 
The commands given allow you to run the same tests locally to ensure you've fixed all issues without having to push your changes to GitHub and wait for Shippable, for example: + +If you haven't already got Ansible available, use the local checkout by running:: + + source hacking/env-setup + +Then run the tests detailed in the GitHub comment:: + + ansible-test sanity --test pep8 + ansible-test sanity --test validate-modules + +If there isn't a GitHub comment stating what's failed you can inspect the results by clicking on the "Details" button under the "checks have failed" message at the end of the PR. + +Rerunning a failing CI job +-------------------------- + +Occasionally you may find your PR fails due to a reason unrelated to your change. This could happen for several reasons, including: + +* a temporary issue accessing an external resource, such as a yum or git repo +* a timeout creating a virtual machine to run the tests on + +If either of these issues appear to be the case, you can rerun the Shippable test by: + +* adding a comment with ``/rebuild`` (full rebuild) or ``/rebuild_failed`` (rebuild only failed CI nodes) to the PR +* closing and re-opening the PR (full rebuild) +* making another change to the PR and pushing to GitHub + +If the issue persists, please contact us in ``#ansible-devel`` on Freenode IRC. + + +How to test a PR +================ + +Ideally, code should add tests that prove that the code works. That's not always possible and tests are not always comprehensive, especially when a user doesn't have access to a wide variety of platforms, or is using an API or web service. In these cases, live testing against real equipment can be more valuable than automation that runs against simulated interfaces. In any case, things should always be tested manually the first time as well. + +Thankfully, helping to test Ansible is pretty straightforward, assuming you are familiar with how Ansible works. 
+ +Setup: Checking out a Pull Request +---------------------------------- + +You can do this by: + +* checking out Ansible +* fetching the proposed changes into a test branch +* testing +* commenting on that particular issue on GitHub + +Here's how: + +.. warning:: + Testing source code from GitHub pull requests sent to us does have some inherent risk, as the source code + sent may have mistakes or malicious code that could have a negative impact on your system. We recommend + doing all testing on a virtual machine, whether a cloud instance, or locally. Some users like Vagrant + or Docker for this, but they are optional. It is also useful to have virtual machines of different Linux or + other flavors, since some features (for example, package managers such as apt or yum) are specific to those OS versions. + + +Create a fresh area to work:: + + + git clone https://github.com/ansible/ansible.git ansible-pr-testing + cd ansible-pr-testing + +Next, find the pull request you'd like to test and make note of its number. It will look something like this:: + + Use os.path.sep instead of hardcoding / #65381 + +.. note:: Only test ``ansible:devel`` + + It is important that the PR request target be ``ansible:devel``, as we do not accept pull requests into any other branch. Dot releases are cherry-picked manually by Ansible staff. + +Use the pull request number when you fetch the proposed changes and create your branch for testing:: + + git fetch origin refs/pull/XXXX/head:testing_PRXXXX + git checkout testing_PRXXXX + +The first command fetches the proposed changes from the pull request and creates a new branch named ``testing_PRXXXX``, where the XXXX is the actual number associated with the pull request (for example, 65381). The second command checks out the newly created branch. + +.. 
note:: + If the GitHub user interface shows that the pull request will not merge cleanly, we do not recommend proceeding if you are not somewhat familiar with git and coding, as you will have to resolve a merge conflict. This is the responsibility of the original pull request contributor. + +.. note:: + Some users do not create feature branches, which can cause problems when they have multiple, unrelated commits in their version of ``devel``. If the source looks like ``someuser:devel``, make sure there is only one commit listed on the pull request. + +The Ansible source includes a script that allows you to use Ansible directly from source without requiring a +full installation that is frequently used by developers on Ansible. + +Simply source it (to use the Linux/Unix terminology) to begin using it immediately:: + + source ./hacking/env-setup + +This script modifies the ``PYTHONPATH`` environment variables (along with a few other things), which will be temporarily +set as long as your shell session is open. + +Testing the Pull Request +------------------------ + +At this point, you should be ready to begin testing! + +Some ideas of what to test are: + +* Create a test Playbook with the examples in and check if they function correctly +* Test to see if any Python backtraces returned (that's a bug) +* Test on different operating systems, or against different library versions + +Run sanity tests +```````````````` + +.. code:: shell + + ansible-test sanity + +More information: :ref:`testing_sanity` + +Run unit tests +`````````````` + +.. code:: shell + + ansible-test units + +More information: :ref:`testing_units` + +Run integration tests +````````````````````` + +.. 
code:: shell + + ansible-test integration -v ping + +More information: :ref:`testing_integration` + +Any potential issues should be added as comments on the pull request (and it's acceptable to comment if the feature works as well), remembering to include the output of ``ansible --version`` + +Example:: + + Works for me! Tested on `Ansible 2.3.0`. I verified this on CentOS 6.5 and also Ubuntu 14.04. + +If the PR does not resolve the issue, or if you see any failures from the unit/integration tests, just include that output instead: + + | This change causes errors for me. + | + | When I ran this Ubuntu 16.04 it failed with the following: + | + | \``` + | some output + | StackTrace + | some other output + | \``` + +Code Coverage Online +```````````````````` + +`The online code coverage reports `_ are a good way +to identify areas for testing improvement in Ansible. By following red colors you can +drill down through the reports to find files which have no tests at all. Adding both +integration and unit tests which show clearly how code should work, verify important +Ansible functions and increase testing coverage in areas where there is none is a valuable +way to help improve Ansible. + +The code coverage reports only cover the ``devel`` branch of Ansible where new feature +development takes place. Pull requests and new code will be missing from the codecov.io +coverage reports so local reporting is needed. Most ``ansible-test`` commands allow you +to collect code coverage, this is particularly useful to indicate where to extend +testing. See :ref:`testing_running_locally` for more information. + + +Want to know more about testing? +================================ + +If you'd like to know more about the plans for improving testing Ansible then why not join the +`Testing Working Group `_. 
diff --git a/docs/docsite/rst/dev_guide/testing/sanity/action-plugin-docs.rst b/docs/docsite/rst/dev_guide/testing/sanity/action-plugin-docs.rst new file mode 100644 index 00000000..e3a5d8b8 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/action-plugin-docs.rst @@ -0,0 +1,4 @@ +action-plugin-docs +================== + +Each action plugin should have a matching module of the same name to provide documentation. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/ansible-doc.rst b/docs/docsite/rst/dev_guide/testing/sanity/ansible-doc.rst new file mode 100644 index 00000000..9f2c4f5f --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/ansible-doc.rst @@ -0,0 +1,4 @@ +ansible-doc +=========== + +Verifies that ``ansible-doc`` can parse module documentation on all supported Python versions. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/ansible-var-precedence-check.rst b/docs/docsite/rst/dev_guide/testing/sanity/ansible-var-precedence-check.rst new file mode 100644 index 00000000..1906886f --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/ansible-var-precedence-check.rst @@ -0,0 +1,6 @@ +:orphan: + +ansible-var-precedence-check +============================ + +Check the order of precedence for Ansible variables against :ref:`ansible_variable_precedence`. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/azure-requirements.rst b/docs/docsite/rst/dev_guide/testing/sanity/azure-requirements.rst new file mode 100644 index 00000000..5e0cc044 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/azure-requirements.rst @@ -0,0 +1,10 @@ +:orphan: + +azure-requirements +================== + +Update the Azure integration test requirements file when changes are made to the Azure packaging requirements file: + +.. 
code-block:: bash + + cp packaging/requirements/requirements-azure.txt test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt diff --git a/docs/docsite/rst/dev_guide/testing/sanity/bin-symlinks.rst b/docs/docsite/rst/dev_guide/testing/sanity/bin-symlinks.rst new file mode 100644 index 00000000..dcec7ed3 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/bin-symlinks.rst @@ -0,0 +1,11 @@ +bin-symlinks +============ + +The ``bin/`` directory in Ansible must contain only symbolic links to executable files. +These files must reside in the ``lib/ansible/`` or ``test/lib/ansible_test/`` directories. + +This is required to allow ``ansible-test`` to work with containers and remote hosts when running from an installed version of Ansible. + +Symlinks for each entry point in ``bin/`` must also be present in ``test/lib/ansible_test/_data/injector/``. +Each symlink should point to the ``python.py`` script in the same directory. +This facilitates running with the correct Python interpreter and enabling code coverage. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/boilerplate.rst b/docs/docsite/rst/dev_guide/testing/sanity/boilerplate.rst new file mode 100644 index 00000000..51c0c089 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/boilerplate.rst @@ -0,0 +1,11 @@ +:orphan: + +boilerplate +=========== + +Most Python files should include the following boilerplate: + +.. code-block:: python + + from __future__ import (absolute_import, division, print_function) + __metaclass__ = type diff --git a/docs/docsite/rst/dev_guide/testing/sanity/botmeta.rst b/docs/docsite/rst/dev_guide/testing/sanity/botmeta.rst new file mode 100644 index 00000000..639bb0bf --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/botmeta.rst @@ -0,0 +1,4 @@ +botmeta +======= + +Verifies that ``.github/BOTMETA.yml`` is valid.
diff --git a/docs/docsite/rst/dev_guide/testing/sanity/changelog.rst b/docs/docsite/rst/dev_guide/testing/sanity/changelog.rst new file mode 100644 index 00000000..8cb53329 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/changelog.rst @@ -0,0 +1,17 @@ +changelog +========= + +Basic linting of changelog fragments with `antsibull-changelog lint `_. + +One or more of the following sections are required: + +- major_changes +- minor_changes +- breaking_changes +- deprecated_features +- removed_features +- security_fixes +- bugfixes +- known_issues + +New modules and plugins must not be included in changelog fragments. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/compile.rst b/docs/docsite/rst/dev_guide/testing/sanity/compile.rst new file mode 100644 index 00000000..222f94e4 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/compile.rst @@ -0,0 +1,4 @@ +compile +======= + +See :ref:`testing_compile` for more information. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/configure-remoting-ps1.rst b/docs/docsite/rst/dev_guide/testing/sanity/configure-remoting-ps1.rst new file mode 100644 index 00000000..e83bc78d --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/configure-remoting-ps1.rst @@ -0,0 +1,5 @@ +configure-remoting-ps1 +====================== + +The file ``examples/scripts/ConfigureRemotingForAnsible.ps1`` is required and must be a regular file. +It is used by external automated processes and cannot be moved, renamed or replaced with a symbolic link. 
diff --git a/docs/docsite/rst/dev_guide/testing/sanity/deprecated-config.rst b/docs/docsite/rst/dev_guide/testing/sanity/deprecated-config.rst new file mode 100644 index 00000000..950805a2 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/deprecated-config.rst @@ -0,0 +1,6 @@ +:orphan: + +deprecated-config +================= + +``DOCUMENTATION`` config is scheduled for removal diff --git a/docs/docsite/rst/dev_guide/testing/sanity/docs-build.rst b/docs/docsite/rst/dev_guide/testing/sanity/docs-build.rst new file mode 100644 index 00000000..23f3c552 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/docs-build.rst @@ -0,0 +1,4 @@ +docs-build +========== + +Verifies that ``make singlehtmldocs`` in ``docs/docsite/`` completes without errors. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/empty-init.rst b/docs/docsite/rst/dev_guide/testing/sanity/empty-init.rst new file mode 100644 index 00000000..e87bb71e --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/empty-init.rst @@ -0,0 +1,10 @@ +empty-init +========== + +The ``__init__.py`` files under the following directories must be empty. For some of these (modules +and tests), ``__init__.py`` files with code won't be used. For others (module_utils), we want the +possibility of using Python namespaces which an empty ``__init__.py`` will allow for. + +- ``lib/ansible/modules/`` +- ``lib/ansible/module_utils/`` +- ``test/units/`` diff --git a/docs/docsite/rst/dev_guide/testing/sanity/future-import-boilerplate.rst b/docs/docsite/rst/dev_guide/testing/sanity/future-import-boilerplate.rst new file mode 100644 index 00000000..9d150e1f --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/future-import-boilerplate.rst @@ -0,0 +1,51 @@ +future-import-boilerplate +========================= + +Most Python files should include the following boilerplate at the top of the file, right after the +comment header: + +.. 
code-block:: python + + from __future__ import (absolute_import, division, print_function) + +This uses Python 3 semantics for absolute vs relative imports, division, and print. By doing this, +we can write code which is portable between Python 2 and Python 3 by following the Python 3 semantics. + + +absolute_import +--------------- + +When Python 2 encounters an import of a name in a file like ``import copy`` it attempts to load +``copy.py`` from the same directory as the file is in. This can cause problems if there is a python +file of that name in the directory and also a python module in ``sys.path`` with that same name. In +that case, Python 2 would load the one in the same directory and there would be no way to load the +one on ``sys.path``. Python 3 fixes this by making imports absolute by default. ``import copy`` +will find ``copy.py`` from ``sys.path``. If you want to import ``copy.py`` from the same directory, +the code needs to be changed to perform a relative import: ``from . import copy``. + +.. seealso:: + + * `Absolute and relative imports `_ + +division +-------- + +In Python 2, the division operator (``/``) returns integer values when used with integers. If there +was a remainder, this part would be left off (aka, `floor division`). In Python 3, the division +operator (``/``) always returns a floating point number. Code that needs to calculate the integer +portion of the quotient needs to switch to using the floor division operator (`//`) instead. + +.. seealso:: + + * `Changing the division operator `_ + +print_function +-------------- + +In Python 2, :func:`python:print` is a keyword. In Python 3, :func:`python3:print` is a function with different +parameters. Using this ``__future__`` allows using the Python 3 print semantics everywhere. + +.. 
seealso:: + + * `Make print a function `_ + diff --git a/docs/docsite/rst/dev_guide/testing/sanity/ignores.rst b/docs/docsite/rst/dev_guide/testing/sanity/ignores.rst new file mode 100644 index 00000000..9d7a94c0 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/ignores.rst @@ -0,0 +1,99 @@ +ignores +======= + +Sanity tests for individual files can be skipped, and specific errors can be ignored. + +When to Ignore Errors +--------------------- + +Sanity tests are designed to improve code quality and identify common issues with content. +When issues are identified during development, those issues should be corrected. + +As development of Ansible continues, sanity tests are expanded to detect issues that previous releases could not. +To allow time for existing content to be updated to pass newer tests, ignore entries can be added. +New content should not use ignores for existing sanity tests. + +When code is fixed to resolve sanity test errors, any relevant ignores must also be removed. +If the ignores are not removed, this will be reported as an unnecessary ignore error. +This is intended to prevent future regressions due to the same error recurring after being fixed. + +When to Skip Tests +------------------ + +Although rare, there are reasons for skipping a sanity test instead of ignoring the errors it reports. + +If a sanity test results in a traceback when processing content, that error cannot be ignored. +If this occurs, open a new `bug report `_ for the issue so it can be fixed. +If the traceback occurs due to an issue with the content, that issue should be fixed. +If the content is correct, the test will need to be skipped until the bug in the sanity test is fixed. + + Caution should be used when skipping sanity tests instead of ignoring them. + Since the test is skipped entirely, resolution of the issue will not be automatically detected. + This will prevent regression detection from working once the issue has been resolved.
+ For this reason it is a good idea to periodically review skipped entries manually to verify they are required. + +Ignore File Location +-------------------- + +The location of the ignore file depends on the type of content being tested. + +Ansible Collections +~~~~~~~~~~~~~~~~~~~ + +Since sanity tests change between Ansible releases, a separate ignore file is needed for each Ansible major release. + +The filename is ``tests/sanity/ignore-X.Y.txt`` where ``X.Y`` is the Ansible release being used to test the collection. + +Maintaining a separate file for each Ansible release allows a collection to pass tests for multiple versions of Ansible. + +Ansible +~~~~~~~ + +When testing Ansible, all ignores are placed in the ``test/sanity/ignore.txt`` file. + +Only a single file is needed because ``ansible-test`` is developed and released as a part of Ansible itself. + +Ignore File Format +------------------ + +The ignore file contains one entry per line. +Each line consists of two columns, separated by a single space. +Comments may be added at the end of an entry, started with a hash (``#``) character, which can be preceded by zero or more spaces. +Blank and comment-only lines are not allowed. + +The first column specifies the file path that the entry applies to. +File paths must be relative to the root of the content being tested. +This is either the Ansible source or an Ansible collection. +File paths cannot contain a space or the hash (``#``) character. + +The second column specifies the sanity test that the entry applies to. +This will be the name of the sanity test. +If the sanity test is specific to a version of Python, the name will include a dash (``-``) and the relevant Python version. +If the named test uses error codes then the error code to ignore must be appended to the name of the test, separated by a colon (``:``).
+ +Below are some example ignore entries for an Ansible collection:: + + roles/my_role/files/my_script.sh shellcheck:SC2154 # ignore undefined variable + plugins/modules/my_module.py validate-modules:E105 # ignore license check + plugins/modules/my_module.py import-3.8 # needs update to support collections.abc on Python 3.8+ + +It is also possible to skip a sanity test for a specific file. +This is done by adding ``!skip`` after the sanity test name in the second column. +When this is done, no error code is included, even if the sanity test uses error codes. + +Below are some example skip entries for an Ansible collection:: + + plugins/module_utils/my_util.py validate-modules!skip # waiting for bug fix in module validator + plugins/lookup/my_plugin.py compile-2.6!skip # Python 2.6 is not supported on the controller + +Ignore File Errors +------------------ + +There are various errors that can be reported for the ignore file itself: + +- syntax errors parsing the ignore file +- references a file path that does not exist +- references to a sanity test that does not exist +- ignoring an error that does not occur +- ignoring a file which is skipped +- duplicate entries diff --git a/docs/docsite/rst/dev_guide/testing/sanity/import.rst b/docs/docsite/rst/dev_guide/testing/sanity/import.rst new file mode 100644 index 00000000..4b29636a --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/import.rst @@ -0,0 +1,5 @@ +import +====== + +All Python imports in ``lib/ansible/modules/`` and ``lib/ansible/module_utils/`` which are not from the Python standard library +must be imported in a try/except ImportError block. 
diff --git a/docs/docsite/rst/dev_guide/testing/sanity/integration-aliases.rst b/docs/docsite/rst/dev_guide/testing/sanity/integration-aliases.rst new file mode 100644 index 00000000..e6cc1e91 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/integration-aliases.rst @@ -0,0 +1,182 @@ +integration-aliases +=================== + +Integration tests are executed by ``ansible-test`` and reside in directories under ``test/integration/targets/``. +Each test MUST have an ``aliases`` file to control test execution. + +Aliases are explained in the following sections. Each alias must be on a separate line in an ``aliases`` file. + +Groups +------ + +Tests must be configured to run in exactly one group. This is done by adding the appropriate group to the ``aliases`` file. + +The following are examples of some of the available groups: + +- ``shippable/posix/group1`` +- ``shippable/windows/group2`` +- ``shippable/azure/group3`` +- ``shippable/aws/group1`` +- ``shippable/cloud/group1`` + +Groups are used to balance tests across multiple CI jobs to minimize test run time. +They also improve efficiency by keeping tests with similar requirements running together. + +When selecting a group for a new test, use the same group as existing tests similar to the one being added. +If more than one group is available, select one randomly. + +Setup +----- + +Aliases can be used to execute setup targets before running tests: + +- ``setup/once/TARGET`` - Run the target ``TARGET`` before the first target that requires it. +- ``setup/always/TARGET`` - Run the target ``TARGET`` before each target that requires it. + +Requirements +------------ + +Aliases can be used to express some test requirements: + +- ``needs/privileged`` - Requires ``--docker-privileged`` when running tests with ``--docker``. +- ``needs/root`` - Requires running tests as ``root`` or with ``--docker``. 
+- ``needs/ssh`` - Requires SSH connections to localhost (or the test container with ``--docker``) without a password. +- ``needs/httptester`` - Requires use of the http-test-container to run tests. + +Dependencies +------------ + +Some test dependencies are automatically discovered: + +- Ansible role dependencies defined in ``meta/main.yml`` files. +- Setup targets defined with ``setup/*`` aliases. +- Symbolic links from one target to a file in another target. + +Aliases can be used to declare dependencies that are not handled automatically: + +- ``needs/target/TARGET`` - Requires use of the test target ``TARGET``. +- ``needs/file/PATH`` - Requires use of the file ``PATH`` relative to the git root. + +Skipping +-------- + +Aliases can be used to skip platforms using one of the following: + +- ``skip/freebsd`` - Skip tests on FreeBSD. +- ``skip/osx`` - Skip tests on macOS. +- ``skip/rhel`` - Skip tests on RHEL. +- ``skip/docker`` - Skip tests when running in a Docker container. + +Platform versions, as specified using the ``--remote`` option with ``/`` removed, can also be skipped: + +- ``skip/freebsd11.1`` - Skip tests on FreeBSD 11.1. +- ``skip/rhel7.6`` - Skip tests on RHEL 7.6. + +Windows versions, as specified using the ``--windows`` option can also be skipped: + +- ``skip/windows/2008`` - Skip tests on Windows Server 2008. +- ``skip/windows/2012-R2`` - Skip tests on Windows Server 2012 R2. + +Aliases can be used to skip Python major versions using one of the following: + +- ``skip/python2`` - Skip tests on Python 2.x. +- ``skip/python3`` - Skip tests on Python 3.x. + +For more fine grained skipping, use conditionals in integration test playbooks, such as: + +.. code-block:: yaml + + when: ansible_distribution in ('Ubuntu') + + +Miscellaneous +------------- + +There are several other aliases available as well: + +- ``destructive`` - Requires ``--allow-destructive`` to run without ``--docker`` or ``--remote``. +- ``hidden`` - Target is ignored. 
Usable as a dependency. Automatic for ``setup_`` and ``prepare_`` prefixed targets. + +Unstable +-------- + +Tests which fail sometimes should be marked with the ``unstable`` alias until the instability has been fixed. +These tests will continue to run for pull requests which modify the test or the module under test. + +This avoids unnecessary test failures for other pull requests, as well as tests on merge runs and nightly CI jobs. + +There are two ways to run unstable tests manually: + +- Use the ``--allow-unstable`` option for ``ansible-test`` +- Prefix the test name with ``unstable/`` when passing it to ``ansible-test``. + +Tests will be marked as unstable by a member of the Ansible Core Team. +GitHub issues_ will be created to track each unstable test. + +Disabled +-------- + +Tests which always fail should be marked with the ``disabled`` alias until they can be fixed. + +Disabled tests are automatically skipped. + +There are two ways to run disabled tests manually: + +- Use the ``--allow-disabled`` option for ``ansible-test`` +- Prefix the test name with ``disabled/`` when passing it to ``ansible-test``. + +Tests will be marked as disabled by a member of the Ansible Core Team. +GitHub issues_ will be created to track each disabled test. + +Unsupported +----------- + +Tests which cannot be run in CI should be marked with the ``unsupported`` alias. +Most tests can be supported through the use of simulators and/or cloud plugins. + +However, if that is not possible then marking a test as unsupported will prevent it from running in CI. + +There are two ways to run unsupported tests manually: + +* Use the ``--allow-unsupported`` option for ``ansible-test`` +* Prefix the test name with ``unsupported/`` when passing it to ``ansible-test``. + +Tests will be marked as unsupported by the contributor of the test. + +Cloud +----- + +Tests for cloud services and other modules that require access to external APIs usually require special support for testing in CI. 
+ +These require an additional alias to indicate the required test plugin. + +Some of the available aliases are: + +- ``cloud/aws`` +- ``cloud/azure`` +- ``cloud/cs`` +- ``cloud/foreman`` +- ``cloud/openshift`` +- ``cloud/tower`` +- ``cloud/vcenter`` + +Untested +-------- + +Every module and plugin should have integration tests, even if the tests cannot be run in CI. + +Issues +------ + +Tests that are marked as unstable_ or disabled_ will have an issue created to track the status of the test. +Each issue will be assigned to one of the following projects: + +- `AWS `_ +- `Azure `_ +- `Windows `_ +- `General `_ + +Questions +--------- + +For questions about integration tests reach out to @mattclay or @gundalow on GitHub or ``#ansible-devel`` on IRC. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/line-endings.rst b/docs/docsite/rst/dev_guide/testing/sanity/line-endings.rst new file mode 100644 index 00000000..d56cfc12 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/line-endings.rst @@ -0,0 +1,4 @@ +line-endings +============ + +All files must use ``\n`` for line endings instead of ``\r\n``. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/metaclass-boilerplate.rst b/docs/docsite/rst/dev_guide/testing/sanity/metaclass-boilerplate.rst new file mode 100644 index 00000000..c7327b39 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/metaclass-boilerplate.rst @@ -0,0 +1,23 @@ +metaclass-boilerplate +===================== + +Most Python files should include the following boilerplate at the top of the file, right after the +comment header and ``from __future__ import``: + +.. code-block:: python + + __metaclass__ = type + + +Python 2 has "new-style classes" and "old-style classes" whereas Python 3 only has new-style classes. +Adding the ``__metaclass__ = type`` boilerplate makes every class defined in that file into +a new-style class as well. + +.. 
code-block:: python + + from __future__ import absolute_import, division, print_function + __metaclass__ = type + + class Foo: + # This is a new-style class even on Python 2 because of the __metaclass__ + pass diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-assert.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-assert.rst new file mode 100644 index 00000000..489f917f --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-assert.rst @@ -0,0 +1,16 @@ +no-assert +========= + +Do not use ``assert`` in production Ansible python code. When running Python +with optimizations, Python will remove ``assert`` statements, potentially +allowing for unexpected behavior throughout the Ansible code base. + +Instead of using ``assert`` you should utilize simple ``if`` statements, +that result in raising an exception. There is a new exception called +``AnsibleAssertionError`` that inherits from ``AnsibleError`` and +``AssertionError``. When possible, utilize a more specific exception +than ``AnsibleAssertionError``. + +Modules will not have access to ``AnsibleAssertionError`` and should instead +raise ``AssertionError``, a more specific exception, or just use +``module.fail_json`` at the failure point. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-basestring.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-basestring.rst new file mode 100644 index 00000000..f1b6ba92 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-basestring.rst @@ -0,0 +1,11 @@ +no-basestring +============= + +Do not use ``isinstance(s, basestring)`` as basestring has been removed in +Python3. You can import ``string_types``, ``binary_type``, or ``text_type`` +from ``ansible.module_utils.six`` and then use ``isinstance(s, string_types)`` +or ``isinstance(s, (binary_type, text_type))`` instead. 
+ +If this is part of code to convert a string to a particular type, +``ansible.module_utils._text`` contains several functions that may be even +better for you: ``to_text``, ``to_bytes``, and ``to_native``. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iteritems.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iteritems.rst new file mode 100644 index 00000000..e231c796 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iteritems.rst @@ -0,0 +1,16 @@ +no-dict-iteritems +================= + +The ``dict.iteritems`` method has been removed in Python 3. There are two recommended alternatives: + +.. code-block:: python + + for KEY, VALUE in DICT.items(): + pass + +.. code-block:: python + + from ansible.module_utils.six import iteritems + + for KEY, VALUE in iteritems(DICT): + pass diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iterkeys.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iterkeys.rst new file mode 100644 index 00000000..9dc4a978 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iterkeys.rst @@ -0,0 +1,9 @@ +no-dict-iterkeys +================ + +The ``dict.iterkeys`` method has been removed in Python 3. Use the following instead: + +.. code-block:: python + + for KEY in DICT: + pass diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-dict-itervalues.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-dict-itervalues.rst new file mode 100644 index 00000000..979450e4 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-dict-itervalues.rst @@ -0,0 +1,16 @@ +no-dict-itervalues +================== + +The ``dict.itervalues`` method has been removed in Python 3. There are two recommended alternatives: + +.. code-block:: python + + for VALUE in DICT.values(): + pass + +.. 
code-block:: python + + from ansible.module_utils.six import itervalues + + for VALUE in itervalues(DICT): + pass diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-get-exception.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-get-exception.rst new file mode 100644 index 00000000..584fbc86 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-get-exception.rst @@ -0,0 +1,28 @@ +no-get-exception +================ + +We created a function, ``ansible.module_utils.pycompat24.get_exception`` to +help retrieve exceptions in a manner compatible with Python 2.4 through +Python 3.6. We no longer support Python 2.4 and Python 2.5 so this is +extraneous and we want to deprecate the function. Porting code should look +something like this: + +.. code-block:: python + + # Unfixed code: + try: + raise IOError('test') + except IOError: + e = get_exception() + do_something(e) + except: + e = get_exception() + do_something_else(e) + + # After fixing: + try: + raise IOError('test') + except IOError as e: + do_something(e) + except Exception as e: + do_something_else(e) diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-illegal-filenames.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-illegal-filenames.rst new file mode 100644 index 00000000..6e6f565e --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-illegal-filenames.rst @@ -0,0 +1,61 @@ +no-illegal-filenames +==================== + +Files and directories should not contain illegal characters or names so that +Ansible can be checked out on any Operating System.
+ +Illegal Characters +------------------ + +The following characters are not allowed to be used in any part of the file or +directory name; + +* ``<`` +* ``>`` +* ``:`` +* ``"`` +* ``/`` +* ``\`` +* ``|`` +* ``?`` +* ``*`` +* Any characters whose integer representations are in the range from 0 through to 31 like ``\n`` + +The following characters are not allowed to be used as the last character of a +file or directory; + +* ``.`` +* ``" "`` (just the space character) + +Illegal Names +------------- + +The following names are not allowed to be used as the name of a file or +directory excluding the extension; + +* ``CON`` +* ``PRN`` +* ``AUX`` +* ``NUL`` +* ``COM1`` +* ``COM2`` +* ``COM3`` +* ``COM4`` +* ``COM5`` +* ``COM6`` +* ``COM7`` +* ``COM8`` +* ``COM9`` +* ``LPT1`` +* ``LPT2`` +* ``LPT3`` +* ``LPT4`` +* ``LPT5`` +* ``LPT6`` +* ``LPT7`` +* ``LPT8`` +* ``LPT9`` + +For example, the file ``folder/COM1``, ``folder/COM1.txt`` are illegal but +``folder/COM1-file`` or ``folder/COM1-file.txt`` is allowed. + diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-main-display.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-main-display.rst new file mode 100644 index 00000000..7ccf0dc7 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-main-display.rst @@ -0,0 +1,12 @@ +no-main-display +=============== + +As of Ansible 2.8, ``Display`` should no longer be imported from ``__main__``. + +``Display`` is now a singleton and should be utilized like the following:: + + from ansible.utils.display import Display + display = Display() + +There is no longer a need to attempt ``from __main__ import display`` inside +a ``try/except`` block. 
diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst new file mode 100644 index 00000000..50dc7baf --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst @@ -0,0 +1,4 @@ +no-smart-quotes +=============== + +Smart quotes (``”“‘’``) should not be used. Use plain ascii quotes (``"'``) instead. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-tests-as-filters.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-tests-as-filters.rst new file mode 100644 index 00000000..0c1f99ac --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-tests-as-filters.rst @@ -0,0 +1,12 @@ +:orphan: + +no-tests-as-filters +=================== + +Using Ansible provided Jinja2 tests as filters will be removed in Ansible 2.9. + +Prior to Ansible 2.5, Jinja2 tests included within Ansible were most often used as filters. The large difference in use is that filters are referenced as ``variable | filter_name`` while Jinja2 tests are referenced as ``variable is test_name``. + +Jinja2 tests are used for comparisons, whereas filters are used for data manipulation, and have different applications in Jinja2. This change is to help differentiate the concepts for a better understanding of Jinja2, and where each can be appropriately used. + +As of Ansible 2.5 using an Ansible provided Jinja2 test with filter syntax will display a deprecation error. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-underscore-variable.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-underscore-variable.rst new file mode 100644 index 00000000..5174a43a --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-underscore-variable.rst @@ -0,0 +1,30 @@ +:orphan: + +no-underscore-variable +====================== + +In the future, Ansible may use the identifier ``_`` to internationalize its +message strings. 
To be ready for that, we need to make sure that there are +no conflicting identifiers defined in the code base. + +In common practice, ``_`` is frequently used as a dummy variable (a variable +to receive a value from a function where the value is useless and never used). +In Ansible, we're using the identifier ``dummy`` for this purpose instead. + +Example of unfixed code: + +.. code-block:: python + + for _ in range(0, retries): + success = retry_thing() + if success: + break + +Example of fixed code: + +.. code-block:: python + + for dummy in range(0, retries): + success = retry_thing() + if success: + break diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-unicode-literals.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-unicode-literals.rst new file mode 100644 index 00000000..c4f3586a --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-unicode-literals.rst @@ -0,0 +1,16 @@ +no-unicode_literals +=================== + +The use of :code:`from __future__ import unicode_literals` has been deemed an anti-pattern. The +problems with it are: + +* It makes it so one can't jump into the middle of a file and know whether a bare literal string is + a byte string or text string. The programmer has to first check the top of the file to see if the + import is there. +* It removes the ability to define native strings (a string which should be a byte string on python2 + and a text string on python3) via a string literal. +* It makes for more context switching. A programmer could be reading one file which has + `unicode_literals` and know that bare string literals are text strings but then switch to another + file (perhaps tracing program execution into a third party library) and have to switch their + understanding of what bare string literals are. 
+ diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-unwanted-files.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-unwanted-files.rst new file mode 100644 index 00000000..3d76324e --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-unwanted-files.rst @@ -0,0 +1,13 @@ +no-unwanted-files +================= + +Specific file types are allowed in certain directories: + +- ``lib`` - All content must reside in the ``lib/ansible`` directory. + +- ``lib/ansible`` - Only source code with one of the following extensions is allowed: + + - ``*.cs`` - C# + - ``*.ps1`` - PowerShell + - ``*.psm1`` - PowerShell + - ``*.py`` - Python diff --git a/docs/docsite/rst/dev_guide/testing/sanity/no-wildcard-import.rst b/docs/docsite/rst/dev_guide/testing/sanity/no-wildcard-import.rst new file mode 100644 index 00000000..fdaf07b0 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/no-wildcard-import.rst @@ -0,0 +1,31 @@ +:orphan: + +no-wildcard-import +================== + +Using :code:`import *` is a bad habit which pollutes your namespace, hinders +debugging, and interferes with static analysis of code. For those reasons, we +do want to limit the use of :code:`import *` in the ansible code. Change our +code to import the specific names that you need instead. + +Examples of unfixed code: + +.. code-block:: python + + from ansible.module_utils.six import * + if isinstance(variable, string_types): + do_something(variable) + + from ansible.module_utils.basic import * + module = AnsibleModule() + +Examples of fixed code: + +.. 
code-block:: python + + from ansible.module_utils import six + if isinstance(variable, six.string_types): + do_something(variable) + + from ansible.module_utils.basic import AnsibleModule + module = AnsibleModule() diff --git a/docs/docsite/rst/dev_guide/testing/sanity/obsolete-files.rst b/docs/docsite/rst/dev_guide/testing/sanity/obsolete-files.rst new file mode 100644 index 00000000..6e2fb2a5 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/obsolete-files.rst @@ -0,0 +1,14 @@ +obsolete-files +============== + +Directories in the Ansible source tree are sometimes made obsolete. +Files should not exist in these directories. +The new location (if any) is dependent on which directory has been made obsolete. + +Below are some of the obsolete directories and their new locations: + +- All of ``test/runner/`` is now under ``test/lib/ansible_test/`` instead. The organization of files in the new directory has changed. +- Most subdirectories of ``test/sanity/`` (with some exceptions) are now under ``test/lib/ansible_test/_data/sanity/`` instead. + +This error occurs most frequently for open pull requests which add or modify files in directories which are now obsolete. +Make sure the branch you are working from is current so that changes can be made in the correct location. 
diff --git a/docs/docsite/rst/dev_guide/testing/sanity/package-data.rst b/docs/docsite/rst/dev_guide/testing/sanity/package-data.rst new file mode 100644 index 00000000..220872dd --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/package-data.rst @@ -0,0 +1,5 @@ +package-data +============ + +Verifies that the combination of ``MANIFEST.in`` and ``package_data`` from ``setup.py`` +properly installs data files from within ``lib/ansible`` diff --git a/docs/docsite/rst/dev_guide/testing/sanity/pep8.rst b/docs/docsite/rst/dev_guide/testing/sanity/pep8.rst new file mode 100644 index 00000000..8595d986 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/pep8.rst @@ -0,0 +1,6 @@ +pep8 +==== + +Python static analysis for PEP 8 style guideline compliance. + +See :ref:`testing_pep8` for more information. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/pslint.rst b/docs/docsite/rst/dev_guide/testing/sanity/pslint.rst new file mode 100644 index 00000000..baa4fa03 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/pslint.rst @@ -0,0 +1,4 @@ +pslint +====== + +PowerShell static analysis for common programming errors using `PSScriptAnalyzer `_. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/pylint-ansible-test.rst b/docs/docsite/rst/dev_guide/testing/sanity/pylint-ansible-test.rst new file mode 100644 index 00000000..a80ddc1e --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/pylint-ansible-test.rst @@ -0,0 +1,8 @@ +:orphan: + +pylint-ansible-test +=================== + +Python static analysis for common programming errors. + +A more strict set of rules applied to ``ansible-test``. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/pylint.rst b/docs/docsite/rst/dev_guide/testing/sanity/pylint.rst new file mode 100644 index 00000000..2b2ef9e5 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/pylint.rst @@ -0,0 +1,4 @@ +pylint +====== + +Python static analysis for common programming errors. 
diff --git a/docs/docsite/rst/dev_guide/testing/sanity/release-names.rst b/docs/docsite/rst/dev_guide/testing/sanity/release-names.rst new file mode 100644 index 00000000..359f7ecb --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/release-names.rst @@ -0,0 +1,4 @@ +Release names +============= + +Verifies that the most recent release name has been added to ``./github/RELEASE_NAMES.yml`` diff --git a/docs/docsite/rst/dev_guide/testing/sanity/replace-urlopen.rst b/docs/docsite/rst/dev_guide/testing/sanity/replace-urlopen.rst new file mode 100644 index 00000000..705195c9 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/replace-urlopen.rst @@ -0,0 +1,4 @@ +replace-urlopen +=============== + +Use ``open_url`` from ``module_utils`` instead of ``urlopen``. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/required-and-default-attributes.rst b/docs/docsite/rst/dev_guide/testing/sanity/required-and-default-attributes.rst new file mode 100644 index 00000000..573c3615 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/required-and-default-attributes.rst @@ -0,0 +1,5 @@ +required-and-default-attributes +=============================== + +Use only one of ``default`` or ``required`` with ``FieldAttribute``. + diff --git a/docs/docsite/rst/dev_guide/testing/sanity/rstcheck.rst b/docs/docsite/rst/dev_guide/testing/sanity/rstcheck.rst new file mode 100644 index 00000000..8fcbbce3 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/rstcheck.rst @@ -0,0 +1,4 @@ +rstcheck +======== + +Check reStructuredText files for syntax and formatting issues. 
diff --git a/docs/docsite/rst/dev_guide/testing/sanity/runtime-metadata.rst b/docs/docsite/rst/dev_guide/testing/sanity/runtime-metadata.rst new file mode 100644 index 00000000..cf6d9272 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/runtime-metadata.rst @@ -0,0 +1,7 @@ +runtime-metadata.yml +==================== + +Validates the schema for: + +* ansible-base's ``lib/ansible/config/ansible_builtin_runtime.yml`` +* collection's ``meta/runtime.yml`` diff --git a/docs/docsite/rst/dev_guide/testing/sanity/sanity-docs.rst b/docs/docsite/rst/dev_guide/testing/sanity/sanity-docs.rst new file mode 100644 index 00000000..34265c34 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/sanity-docs.rst @@ -0,0 +1,4 @@ +sanity-docs +=========== + +Documentation for each ``ansible-test sanity`` test is required. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/shebang.rst b/docs/docsite/rst/dev_guide/testing/sanity/shebang.rst new file mode 100644 index 00000000..cff2aa09 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/shebang.rst @@ -0,0 +1,16 @@ +shebang +======= + +Most executable files should only use one of the following shebangs: + +- ``#!/bin/sh`` +- ``#!/bin/bash`` +- ``#!/usr/bin/make`` +- ``#!/usr/bin/env python`` +- ``#!/usr/bin/env bash`` + +NOTE: For ``#!/bin/bash``, any of the options ``eux`` may also be used, such as ``#!/bin/bash -eux``. + +This does not apply to Ansible modules, which should not be executable and must always use ``#!/usr/bin/python``. + +Some exceptions are permitted. Ask if you have questions. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/shellcheck.rst b/docs/docsite/rst/dev_guide/testing/sanity/shellcheck.rst new file mode 100644 index 00000000..446ee1ee --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/shellcheck.rst @@ -0,0 +1,4 @@ +shellcheck +========== + +Static code analysis for shell scripts using the excellent `shellcheck `_ tool. 
diff --git a/docs/docsite/rst/dev_guide/testing/sanity/symlinks.rst b/docs/docsite/rst/dev_guide/testing/sanity/symlinks.rst new file mode 100644 index 00000000..017209bd --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/symlinks.rst @@ -0,0 +1,6 @@ +symlinks +======== + +Symbolic links are only permitted for files that exist to ensure proper tarball generation during a release. + +If other types of symlinks are needed for tests they must be created as part of the test. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/test-constraints.rst b/docs/docsite/rst/dev_guide/testing/sanity/test-constraints.rst new file mode 100644 index 00000000..36ceb361 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/test-constraints.rst @@ -0,0 +1,4 @@ +test-constraints +================ + +Constraints for test requirements should be in ``test/lib/ansible_test/_data/requirements/constraints.txt``. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/update-bundled.rst b/docs/docsite/rst/dev_guide/testing/sanity/update-bundled.rst new file mode 100644 index 00000000..d8f19385 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/update-bundled.rst @@ -0,0 +1,31 @@ +:orphan: + +update-bundled +============== + +Check whether any of our known bundled code needs to be updated for a new upstream release. + +This test can error in the following ways: + +* The bundled code is out of date with regard to the latest release on pypi. Update the code + to the new version and update the version in _BUNDLED_METADATA to solve this. + +* The code is lacking a _BUNDLED_METADATA variable. This typically happens when a bundled version + is updated and we forget to add a _BUNDLED_METADATA variable to the updated file. Once that is + added, this error should go away. + +* A file has a _BUNDLED_METADATA variable but the file isn't specified in + :file:`test/sanity/code-smell/update-bundled.py`. This typically happens when a new bundled + library is added. 
Add the file to the `get_bundled_libs()` function in the `update-bundled.py` + test script to solve this error. + +_BUNDLED_METADATA has the following fields: + +:pypi_name: Name of the bundled package on pypi + +:version: Version of the package that we are including here + +:version_constraints: Optional PEP440 specifier for the version range that we are bundling. + Currently, the only valid use of this is to follow a version that is + compatible with the Python stdlib when newer versions of the pypi package + implement a new API. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/use-argspec-type-path.rst b/docs/docsite/rst/dev_guide/testing/sanity/use-argspec-type-path.rst new file mode 100644 index 00000000..e06d83dd --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/use-argspec-type-path.rst @@ -0,0 +1,10 @@ +use-argspec-type-path +===================== + +The AnsibleModule argument_spec knows of several types beyond the standard python types. One of +these is ``path``. When used, type ``path`` ensures that an argument is a string and expands any +shell variables and tilde characters. + +This test looks for use of :func:`os.path.expanduser ` in modules. When found, it tells the user to +replace it with ``type='path'`` in the module's argument_spec or list it as a false positive in the +test. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/use-compat-six.rst b/docs/docsite/rst/dev_guide/testing/sanity/use-compat-six.rst new file mode 100644 index 00000000..1f415005 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/use-compat-six.rst @@ -0,0 +1,4 @@ +use-compat-six +============== + +Use ``six`` from ``module_utils`` instead of ``six``. 
diff --git a/docs/docsite/rst/dev_guide/testing/sanity/validate-modules.rst b/docs/docsite/rst/dev_guide/testing/sanity/validate-modules.rst new file mode 100644 index 00000000..efb58f20 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/validate-modules.rst @@ -0,0 +1,6 @@ +validate-modules +================ + +Analyze modules for common issues in code and documentation. + +See :ref:`testing_validate-modules` for more information. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/yamllint.rst b/docs/docsite/rst/dev_guide/testing/sanity/yamllint.rst new file mode 100644 index 00000000..5822bb7c --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/yamllint.rst @@ -0,0 +1,4 @@ +yamllint +======== + +Check YAML files for syntax and formatting issues. diff --git a/docs/docsite/rst/dev_guide/testing_compile.rst b/docs/docsite/rst/dev_guide/testing_compile.rst new file mode 100644 index 00000000..5c22194d --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing_compile.rst @@ -0,0 +1,76 @@ +:orphan: + +.. _testing_compile: + +************* +Compile Tests +************* + +.. contents:: Topics + +Overview +======== + +Compile tests check source files for valid syntax on all supported python versions: + +- 2.4 (Ansible 2.3 only) +- 2.6 +- 2.7 +- 3.5 +- 3.6 +- 3.7 +- 3.8 +- 3.9 + +NOTE: In Ansible 2.4 and earlier the compile test was provided by a dedicated sub-command ``ansible-test compile`` instead of a sanity test using ``ansible-test sanity --test compile``. + +Running compile tests locally +============================= + +Compile tests can be run across the whole code base by doing: + +.. code:: shell + + cd /path/to/ansible/source + source hacking/env-setup + ansible-test sanity --test compile + +Against a single file by doing: + +.. code:: shell + + ansible-test sanity --test compile lineinfile + +Or against a specific Python version by doing: + +.. 
code:: shell + + ansible-test sanity --test compile --python 2.7 lineinfile + +For advanced usage see the help: + +.. code:: shell + + ansible-test sanity --help + + +Installing dependencies +======================= + +``ansible-test`` has a number of dependencies , for ``compile`` tests we suggest running the tests with ``--local``, which is the default + +The dependencies can be installed using the ``--requirements`` argument. For example: + +.. code:: shell + + ansible-test sanity --test compile --requirements lineinfile + + + +The full list of requirements can be found at `test/lib/ansible_test/_data/requirements `_. Requirements files are named after their respective commands. See also the `constraints `_ applicable to all commands. + + +Extending compile tests +======================= + +If you believe changes are needed to the compile tests please add a comment on the `Testing Working Group Agenda `_ so it can be discussed. diff --git a/docs/docsite/rst/dev_guide/testing_documentation.rst b/docs/docsite/rst/dev_guide/testing_documentation.rst new file mode 100644 index 00000000..f9989395 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing_documentation.rst @@ -0,0 +1,36 @@ +:orphan: + +.. _testing_module_documentation: + +**************************** +Testing module documentation +**************************** + +Before you submit a module for inclusion in the main Ansible repo, you must test your module documentation for correct HTML rendering and to ensure that the argspec matches the documentation in your Python file. The community pages offer more information on :ref:`testing reStructuredText documentation `. + +To check the HTML output of your module documentation: + +#. Ensure working :ref:`development environment `. +#. Install required Python packages (drop '--user' in venv/virtualenv): + + .. code-block:: bash + + pip install --user -r requirements.txt + pip install --user -r docs/docsite/requirements.txt + +#. 
Ensure your module is in the correct directory: ``lib/ansible/modules/$CATEGORY/mymodule.py``. +#. Build HTML from your module documentation: ``MODULES=mymodule make webdocs``. +#. To build the HTML documentation for multiple modules, use a comma-separated list of module names: ``MODULES=mymodule,mymodule2 make webdocs``. +#. View the HTML page at ``file:///path/to/docs/docsite/_build/html/modules/mymodule_module.html``. + +To ensure that your module documentation matches your ``argument_spec``: + +#. Install required Python packages (drop '--user' in venv/virtualenv): + + .. code-block:: bash + + pip install --user -r test/lib/ansible_test/_data/requirements/sanity.txt + +#. run the ``validate-modules`` test:: + + ansible-test sanity --test validate-modules mymodule diff --git a/docs/docsite/rst/dev_guide/testing_httptester.rst b/docs/docsite/rst/dev_guide/testing_httptester.rst new file mode 100644 index 00000000..a8806371 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing_httptester.rst @@ -0,0 +1,27 @@ +:orphan: + +********** +httptester +********** + +.. contents:: Topics + +Overview +======== + +``httptester`` is a docker container used to host certain resources required by :ref:`testing_integration`. This is to avoid CI tests requiring external resources (such as git or package repos) which, if temporarily unavailable, would cause tests to fail. + +HTTP Testing endpoint which provides the following capabilities: + +* httpbin +* nginx +* SSL +* SNI + + +Source files can be found in the `http-test-container `_ repository. + +Extending httptester +==================== + +If you have sometime to improve ``httptester`` please add a comment on the `Testing Working Group Agenda `_ to avoid duplicated effort. 
diff --git a/docs/docsite/rst/dev_guide/testing_integration.rst b/docs/docsite/rst/dev_guide/testing_integration.rst new file mode 100644 index 00000000..0880e5b1 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing_integration.rst @@ -0,0 +1,236 @@ +:orphan: + +.. _testing_integration: + +***************** +Integration tests +***************** + +.. contents:: Topics + +The Ansible integration Test system. + +Tests for playbooks, by playbooks. + +Some tests may require credentials. Credentials may be specified with `credentials.yml`. + +Some tests may require root. + +.. note:: + Every new module and plugin should have integration tests, even if the tests cannot be run on Ansible CI infrastructure. + In this case, the tests should be marked with the ``unsupported`` alias in `aliases file `_. + +Quick Start +=========== + +It is highly recommended that you install and activate the ``argcomplete`` python package. +It provides tab completion in ``bash`` for the ``ansible-test`` test runner. + +Configuration +============= + +ansible-test command +-------------------- + +The example below assumes ``bin/`` is in your ``$PATH``. An easy way to achieve that +is to initialize your environment with the ``env-setup`` command:: + + source hacking/env-setup + ansible-test --help + +You can also call ``ansible-test`` with the full path:: + + bin/ansible-test --help + +integration_config.yml +---------------------- + +Making your own version of ``integration_config.yml`` can allow for setting some +tunable parameters to help run the tests better in your environment. Some +tests (for example, cloud tests) will only run when access credentials are provided. For more +information about supported credentials, refer to the various ``cloud-config-*.template`` +files in the ``test/integration/`` directory. + +Prerequisites +============= + +Some tests assume things like hg, svn, and git are installed, and in path. 
Some tests +(such as those for Amazon Web Services) need separate definitions, which will be covered +later in this document. + +(Complete list pending) + +Non-destructive Tests +===================== + +These tests will modify files in subdirectories, but will not do things that install or remove packages or things +outside of those test subdirectories. They will also not reconfigure or bounce system services. + +.. note:: Running integration tests within Docker + + To protect your system from any potential changes caused by integration tests, and to ensure a sensible set of dependencies are available we recommend that you always run integration tests with the ``--docker`` option, for example ``--docker centos8``. See the `list of supported docker images `_ for options (the ``default`` image is used for sanity and unit tests, as well as for platform independent integration tests such as those for cloud modules). + +.. note:: Avoiding pulling new Docker images + + Use the ``--docker-no-pull`` option to avoid pulling the latest container image. This is required when using custom local images that are not available for download. + +Run as follows for all POSIX platform tests executed by our CI system in a fedora32 docker container:: + + ansible-test integration shippable/ --docker fedora32 + +You can target a specific tests as well, such as for individual modules:: + + ansible-test integration ping + +You can use the ``-v`` option to make the output more verbose:: + + ansible-test integration lineinfile -vvv + +Use the following command to list all the available targets:: + + ansible-test integration --list-targets + +.. note:: Bash users + + If you use ``bash`` with ``argcomplete``, obtain a full list by doing: ``ansible-test integration `` + +Destructive Tests +================= + +These tests are allowed to install and remove some trivial packages. You will likely want to devote these +to a virtual environment, such as Docker. 
They won't reformat your filesystem:: + + ansible-test integration destructive/ --docker fedora32 + +Windows Tests +============= + +These tests exercise the ``winrm`` connection plugin and Windows modules. You'll +need to define an inventory with a remote Windows 2008 or 2012 Server to use +for testing, and enable PowerShell Remoting to continue. + +Running these tests may result in changes to your Windows host, so don't run +them against a production/critical Windows environment. + +Enable PowerShell Remoting (run on the Windows host via Remote Desktop):: + + Enable-PSRemoting -Force + +Define Windows inventory:: + + cp inventory.winrm.template inventory.winrm + ${EDITOR:-vi} inventory.winrm + +Run the Windows tests executed by our CI system:: + + ansible-test windows-integration -v shippable/ + +Tests in Docker containers +========================== + +If you have a Linux system with Docker installed, running integration tests using the same Docker containers used by +the Ansible continuous integration (CI) system is recommended. + +.. note:: Docker on non-Linux + + Using Docker Engine to run Docker on a non-Linux host (such as macOS) is not recommended. + Some tests may fail, depending on the image used for testing. + Using the ``--docker-privileged`` option when running ``integration`` (not ``network-integration`` or ``windows-integration``) may resolve the issue. + +Running Integration Tests +------------------------- + +To run all CI integration test targets for POSIX platforms in a Ubuntu 18.04 container:: + + ansible-test integration shippable/ --docker ubuntu1804 + +You can also run specific tests or select a different Linux distribution. 
+For example, to run tests for the ``ping`` module on a Ubuntu 18.04 container:: + + ansible-test integration ping --docker ubuntu1804 + +Container Images +---------------- + +Python 2 +```````` + +Most container images are for testing with Python 2: + + - centos6 + - centos7 + - fedora28 + - opensuse15py2 + - ubuntu1404 + - ubuntu1604 + +Python 3 +```````` + +To test with Python 3 use the following images: + + - centos8 + - fedora32 + - opensuse15 + - ubuntu1804 + + +Legacy Cloud Tests +================== + +Some of the cloud tests run as normal integration tests, and others run as legacy tests; see the +:ref:`testing_integration_legacy` page for more information. + + +Other configuration for Cloud Tests +=================================== + +In order to run some tests, you must provide access credentials in a file named +``cloud-config-aws.yml`` or ``cloud-config-cs.ini`` in the test/integration +directory. Corresponding .template files are available for for syntax help. The newer AWS +tests now use the file test/integration/cloud-config-aws.yml + +IAM policies for AWS +==================== + +Ansible needs fairly wide ranging powers to run the tests in an AWS account. This rights can be provided to a dedicated user. These need to be configured before running the test. + +testing-policies +---------------- + +The GitHub repository `mattclay/aws-terminator `_ +contains two sets of policies used for all existing AWS module integratoin tests. +The `hacking/aws_config/setup_iam.yml` playbook can be used to setup two groups: + + - `ansible-integration-ci` will have the policies applied necessary to run any + integration tests not marked as `unsupported` and are designed to mirror those + used by Ansible's CI. + - `ansible-integration-unsupported` will have the additional policies applied + necessary to run the integraion tests marked as `unsupported` including tests + for managing IAM roles, users and groups. 
+ +Once the groups have been created, you'll need to create a user and make the user a member of these +groups. The policies are designed to minimize the rights of that user. Please note that while this policy does limit +the user to one region, this does not fully restrict the user (primarily due to the limitations of the Amazon ARN +notation). The user will still have wide privileges for viewing account definitions, and will also able to manage +some resources that are not related to testing (for example, AWS lambdas with different names). Tests should not +be run in a primary production account in any case. + +Other Definitions required +-------------------------- + +Apart from installing the policy and giving it to the user identity running the tests, a +lambda role `ansible_integration_tests` has to be created which has lambda basic execution +privileges. + + +Network Tests +============= + +For guidance on writing network test see :ref:`testing_resource_modules`. + + +Where to find out more +====================== + +If you'd like to know more about the plans for improving testing Ansible, join the `Testing Working Group `_. diff --git a/docs/docsite/rst/dev_guide/testing_integration_legacy.rst b/docs/docsite/rst/dev_guide/testing_integration_legacy.rst new file mode 100644 index 00000000..759285e3 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing_integration_legacy.rst @@ -0,0 +1,108 @@ +:orphan: + +.. _testing_integration_legacy: + +******************************************* +Testing using the Legacy Integration system +******************************************* + +.. contents:: Topics + +This page details how to run the integration tests that haven't been ported to the new ``ansible-test`` framework. 
+ +The following areas are still tested using the legacy ``make tests`` command: + +* amazon (some) +* azure +* cloudflare +* cloudscale +* cloudstack +* consul +* exoscale +* gce +* jenkins +* rackspace + +Over time the above list will be reduced as tests are ported to the ``ansible-test`` framework. + + +Running Cloud Tests +==================== + +Cloud tests exercise capabilities of cloud modules (for example, ec2_key). These are +not 'tests run in the cloud' so much as tests that leverage the cloud modules +and are organized by cloud provider. + +Some AWS tests may use environment variables. It is recommended to either unset any AWS environment variables( such as ``AWS_DEFAULT_PROFILE``, ``AWS_SECRET_ACCESS_KEY``, and so on) or be sure that the environment variables match the credentials provided in ``credentials.yml`` to ensure the tests run with consistency to their full capability on the expected account. See `AWS CLI docs `_ for information on creating a profile. + +Subsets of tests may be run by ``#commenting`` out unnecessary roles in the appropriate playbook, such as ``test/integration/amazon.yml``. + +In order to run cloud tests, you must provide access credentials in a file +named ``credentials.yml``. A sample credentials file named +``credentials.template`` is available for syntax help. + +Provide cloud credentials:: + + cp credentials.template credentials.yml + ${EDITOR:-vi} credentials.yml + + +Other configuration +=================== + +In order to run some tests, you must provide access credentials in a file named +``credentials.yml``. A sample credentials file named ``credentials.template`` is available +for syntax help. + +IAM policies for AWS +==================== + +In order to run the tests in an AWS account ansible needs fairly wide ranging powers which +can be provided to a dedicated user or temporary credentials using a specific policy +configured in the AWS account. 
+ +testing-iam-policy.json.j2 +-------------------------- + +The testing-iam-policy.json.j2 file contains a policy which can be given to the user +running the tests to give close to minimum rights required to run the tests. Please note +that this does not fully restrict the user; The user has wide privileges for viewing +account definitions and is also able to manage some resources that are not related to +testing (for example, AWS lambdas with different names) primarily due to the limitations of the +Amazon ARN notation. At the very least the policy limits the user to one region, however +tests should not be run in a primary production account in any case. + +Other Definitions required +-------------------------- + +Apart from installing the policy and giving it to the user identity running +the tests, a lambda role `ansible_integration_tests` has to be created which +has lambda basic execution privileges. + + +Running Tests +============= + +The tests are invoked via a ``Makefile``. + +If you haven't already got Ansible available use the local checkout by doing:: + + source hacking/env-setup + +Run the tests by doing:: + + cd test/integration/ + # TARGET is the name of the test from the list at the top of this page + #make TARGET + # for example + make amazon + # To run all cloud tests you can do: + make cloud + +.. warning:: Possible cost of running cloud tests + + Running cloud integration tests will create and destroy cloud + resources. Running these tests may result in additional fees associated with + your cloud account. Care is taken to ensure that created resources are + removed. However, it is advisable to inspect your AWS console to ensure no + unexpected resources are running. diff --git a/docs/docsite/rst/dev_guide/testing_pep8.rst b/docs/docsite/rst/dev_guide/testing_pep8.rst new file mode 100644 index 00000000..92630995 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing_pep8.rst @@ -0,0 +1,24 @@ +:orphan: + +.. 
_testing_pep8: + +***** +PEP 8 +***** + +.. contents:: Topics + +`PEP 8`_ style guidelines are enforced by `pycodestyle`_ on all python files in the repository by default. + +Running Locally +=============== + +The `PEP 8`_ check can be run locally with:: + + + ansible-test sanity --test pep8 [file-or-directory-path-to-check] ... + + + +.. _PEP 8: https://www.python.org/dev/peps/pep-0008/ +.. _pycodestyle: https://pypi.org/project/pycodestyle/ diff --git a/docs/docsite/rst/dev_guide/testing_running_locally.rst b/docs/docsite/rst/dev_guide/testing_running_locally.rst new file mode 100644 index 00000000..964a9e8d --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing_running_locally.rst @@ -0,0 +1,89 @@ +:orphan: + +.. _testing_running_locally: + +*************** +Testing Ansible +*************** + +This document describes how to: + +* Run tests locally using ``ansible-test`` +* Extend + +.. contents:: + :local: + +Requirements +============ + +There are no special requirements for running ``ansible-test`` on Python 2.7 or later. +The ``argparse`` package is required for Python 2.6. +The requirements for each ``ansible-test`` command are covered later. + + +Test Environments +================= + +Most ``ansible-test`` commands support running in one or more isolated test environments to simplify testing. + + +Remote +------ + +The ``--remote`` option runs tests in a cloud hosted environment. +An API key is required to use this feature. + + Recommended for integration tests. + +See the `list of supported platforms and versions `_ for additional details. + +Environment Variables +--------------------- + +When using environment variables to manipulate tests there some limitations to keep in mind. Environment variables are: + +* Not propagated from the host to the test environment when using the ``--docker`` or ``--remote`` options. 
+* Not exposed to the test environment unless whitelisted in ``test/lib/ansible_test/_internal/util.py`` in the ``common_environment`` function. + + Example: ``ANSIBLE_KEEP_REMOTE_FILES=1`` can be set when running ``ansible-test integration --venv``. However, using the ``--docker`` option would + require running ``ansible-test shell`` to gain access to the Docker environment. Once at the shell prompt, the environment variable could be set + and the tests executed. This is useful for debugging tests inside a container by following the + :ref:`Debugging AnsibleModule-based modules ` instructions. + +Interactive Shell +================= + +Use the ``ansible-test shell`` command to get an interactive shell in the same environment used to run tests. Examples: + +* ``ansible-test shell --docker`` - Open a shell in the default docker container. +* ``ansible-test shell --venv --python 3.6`` - Open a shell in a Python 3.6 virtual environment. + + +Code Coverage +============= + +Code coverage reports make it easy to identify untested code for which more tests should +be written. Online reports are available but only cover the ``devel`` branch (see +:ref:`developing_testing`). For new code local reports are needed. + +Add the ``--coverage`` option to any test command to collect code coverage data. If you +aren't using the ``--venv`` or ``--docker`` options which create an isolated python +environment then you may have to use the ``--requirements`` option to ensure that the +correct version of the coverage module is installed:: + + ansible-test coverage erase + ansible-test units --coverage apt + ansible-test integration --coverage aws_lambda + ansible-test coverage html + + +Reports can be generated in several different formats: + +* ``ansible-test coverage report`` - Console report. +* ``ansible-test coverage html`` - HTML report. +* ``ansible-test coverage xml`` - XML report. + +To clear data between test runs, use the ``ansible-test coverage erase`` command. 
For a full list of features see the online help:: + + ansible-test coverage --help diff --git a/docs/docsite/rst/dev_guide/testing_sanity.rst b/docs/docsite/rst/dev_guide/testing_sanity.rst new file mode 100644 index 00000000..a4f99edd --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing_sanity.rst @@ -0,0 +1,53 @@ +:orphan: + +.. _testing_sanity: + +************ +Sanity Tests +************ + +.. contents:: Topics + +Sanity tests are made up of scripts and tools used to perform static code analysis. +The primary purpose of these tests is to enforce Ansible coding standards and requirements. + +Tests are run with ``ansible-test sanity``. +All available tests are run unless the ``--test`` option is used. + + +How to run +========== + +.. note:: + To run sanity tests using docker, always use the default docker image + by passing the ``--docker`` or ``--docker default`` argument. + +.. note:: + When using docker and the ``--base-branch`` argument, + also use the ``--docker-keep-git`` argument to avoid git related errors. + +.. code:: shell + + source hacking/env-setup + + # Run all sanity tests + ansible-test sanity + + # Run all sanity tests including disabled ones + ansible-test sanity --allow-disabled + + # Run all sanity tests against against certain files + ansible-test sanity lib/ansible/modules/files/template.py + + # Run all tests inside docker (good if you don't have dependencies installed) + ansible-test sanity --docker default + + # Run validate-modules against a specific file + ansible-test sanity --test validate-modules lib/ansible/modules/files/template.py + +Available Tests +=============== + +Tests can be listed with ``ansible-test sanity --list-tests``. + +See the full list of :ref:`sanity tests `, which details the various tests and details how to fix identified issues. 
diff --git a/docs/docsite/rst/dev_guide/testing_units.rst b/docs/docsite/rst/dev_guide/testing_units.rst new file mode 100644 index 00000000..7573da6f --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing_units.rst @@ -0,0 +1,213 @@ +:orphan: + +.. _testing_units: + +********** +Unit Tests +********** + +Unit tests are small isolated tests that target a specific library or module. Unit tests +in Ansible are currently the only way of driving tests from python within Ansible's +continuous integration process. This means that in some circumstances the tests may be a +bit wider than just units. + +.. contents:: Topics + +Available Tests +=============== + +Unit tests can be found in `test/units +`_. Notice that the directory +structure of the tests matches that of ``lib/ansible/``. + +Running Tests +============= + +.. note:: + To run unit tests using docker, always use the default docker image + by passing the ``--docker`` or ``--docker default`` argument. + +The Ansible unit tests can be run across the whole code base by doing: + +.. code:: shell + + cd /path/to/ansible/source + source hacking/env-setup + ansible-test units --docker -v + +Against a single file by doing: + +.. code:: shell + + ansible-test units --docker -v apt + +Or against a specific Python version by doing: + +.. code:: shell + + ansible-test units --docker -v --python 2.7 apt + +If you are running unit tests against things other than modules, such as module utilities, specify the whole file path: + +.. code:: shell + + ansible-test units --docker -v test/units/module_utils/basic/test_imports.py + +For advanced usage see the online help:: + + ansible-test units --help + +You can also run tests in Ansible's continuous integration system by opening a pull +request. This will automatically determine which tests to run based on the changes made +in your pull request. 
+ + +Installing dependencies +======================= + +If you are running ``ansible-test`` with the ``--docker`` or ``--venv`` option you do not need to install dependencies manually. + +Otherwise you can install dependencies using the ``--requirements`` option, which will +install all the required dependencies needed for unit tests. For example: + +.. code:: shell + + ansible-test units --python 2.7 --requirements apache2_module + + +The list of unit test requirements can be found at `test/units/requirements.txt +`_. + +This does not include the list of unit test requirements for ``ansible-test`` itself, +which can be found at `test/lib/ansible_test/_data/requirements/units.txt +`_. + +See also the `constraints +`_ +applicable to all test commands. + + +Extending unit tests +==================== + + +.. warning:: What a unit test isn't + + If you start writing a test that requires external services then + you may be writing an integration test, rather than a unit test. + + +Structuring Unit Tests +`````````````````````` + +Ansible drives unit tests through `pytest `_. This +means that tests can either be written a simple functions which are included in any file +name like ``test_.py`` or as classes. + +Here is an example of a function:: + + #this function will be called simply because it is called test_*() + + def test_add() + a = 10 + b = 23 + c = 33 + assert a + b = c + +Here is an example of a class:: + + import unittest + + class AddTester(unittest.TestCase) + + def SetUp() + self.a = 10 + self.b = 23 + + # this function will + def test_add() + c = 33 + assert self.a + self.b = c + + # this function will + def test_subtract() + c = -13 + assert self.a - self.b = c + +Both methods work fine in most circumstances; the function-based interface is simpler and +quicker and so that's probably where you should start when you are just trying to add a +few basic tests for a module. 
The class-based test allows more tidy set up and tear down +of pre-requisites, so if you have many test cases for your module you may want to refactor +to use that. + +Assertions using the simple ``assert`` function inside the tests will give full +information on the cause of the failure with a trace-back of functions called during the +assertion. This means that plain asserts are recommended over other external assertion +libraries. + +A number of the unit test suites include functions that are shared between several +modules, especially in the networking arena. In these cases a file is created in the same +directory, which is then included directly. + + +Module test case common code +```````````````````````````` + +Keep common code as specific as possible within the `test/units/` directory structure. +Don't import common unit test code from directories outside the current or parent directories. + +Don't import other unit tests from a unit test. Any common code should be in dedicated +files that aren't themselves tests. + + +Fixtures files +`````````````` + +To mock out fetching results from devices, or provide other complex data structures that +come from external libraries, you can use ``fixtures`` to read in pre-generated data. + +You can check how `fixtures `_ +are used in `cpuinfo fact tests `_ + +If you are simulating APIs you may find that Python placebo is useful. See +:ref:`testing_units_modules` for more information. + + +Code Coverage For New or Updated Unit Tests +``````````````````````````````````````````` +New code will be missing from the codecov.io coverage reports (see :ref:`developing_testing`), so +local reporting is needed. Most ``ansible-test`` commands allow you to collect code +coverage; this is particularly useful when to indicate where to extend testing. + +To collect coverage data add the ``--coverage`` argument to your ``ansible-test`` command line: + +.. 
code:: shell + + ansible-test units --coverage apt + ansible-test coverage html + +Results will be written to ``test/results/reports/coverage/index.html`` + +Reports can be generated in several different formats: + +* ``ansible-test coverage report`` - Console report. +* ``ansible-test coverage html`` - HTML report. +* ``ansible-test coverage xml`` - XML report. + +To clear data between test runs, use the ``ansible-test coverage erase`` command. See +:ref:`testing_running_locally` for more information about generating coverage +reports. + + +.. seealso:: + + :ref:`testing_units_modules` + Special considerations for unit testing modules + :ref:`testing_running_locally` + Running tests locally including gathering and reporting coverage data + `Python 3 documentation - 26.4. unittest — Unit testing framework `_ + The documentation of the unittest framework in python 3 + `Python 2 documentation - 25.3. unittest — Unit testing framework `_ + The documentation of the earliest supported unittest framework - from Python 2.6 + `pytest: helps you write better programs `_ + The documentation of pytest - the framework actually used to run Ansible unit tests diff --git a/docs/docsite/rst/dev_guide/testing_units_modules.rst b/docs/docsite/rst/dev_guide/testing_units_modules.rst new file mode 100644 index 00000000..88763eb0 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing_units_modules.rst @@ -0,0 +1,563 @@ +:orphan: + +.. _testing_units_modules: + +**************************** +Unit Testing Ansible Modules +**************************** + +.. highlight:: python + +.. contents:: Topics + +Introduction +============ + +This document explains why, how and when you should use unit tests for Ansible modules. +The document doesn't apply to other parts of Ansible for which the recommendations are +normally closer to the Python standard. There is basic documentation for Ansible unit +tests in the developer guide :ref:`testing_units`. 
This document should +be readable for a new Ansible module author. If you find it incomplete or confusing, +please open a bug or ask for help on Ansible IRC. + +What Are Unit Tests? +==================== + +Ansible includes a set of unit tests in the :file:`test/units` directory. These tests primarily cover the +internals but can also cover Ansible modules. The structure of the unit tests matches +the structure of the code base, so the tests that reside in the :file:`test/units/modules/` directory +are organized by module groups. + +Integration tests can be used for most modules, but there are situations where +cases cannot be verified using integration tests. This means that Ansible unit test cases +may extend beyond testing only minimal units and in some cases will include some +level of functional testing. + + +Why Use Unit Tests? +=================== + +Ansible unit tests have advantages and disadvantages. It is important to understand these. +Advantages include: + +* Most unit tests are much faster than most Ansible integration tests. The complete suite + of unit tests can be run regularly by a developer on their local system. +* Unit tests can be run by developers who don't have access to the system which the module is + designed to work on, allowing a level of verification that changes to core functions + haven't broken module expectations. +* Unit tests can easily substitute system functions allowing testing of software that + would be impractical. For example, the ``sleep()`` function can be replaced and we check + that a ten minute sleep was called without actually waiting ten minutes. +* Unit tests are run on different Python versions. This allows us to + ensure that the code behaves in the same way on different Python versions. + +There are also some potential disadvantages of unit tests. 
Unit tests don't normally +directly test actual useful valuable features of software, instead just internal +implementation + +* Unit tests that test the internal, non-visible features of software may make + refactoring difficult if those internal features have to change (see also naming in How + below) +* Even if the internal feature is working correctly it is possible that there will be a + problem between the internal code tested and the actual result delivered to the user + +Normally the Ansible integration tests (which are written in Ansible YAML) provide better +testing for most module functionality. If those tests already test a feature and perform +well there may be little point in providing a unit test covering the same area as well. + +When To Use Unit Tests +====================== + +There are a number of situations where unit tests are a better choice than integration +tests. For example, testing things which are impossible, slow or very difficult to test +with integration tests, such as: + +* Forcing rare / strange / random situations that can't be forced, such as specific network + failures and exceptions +* Extensive testing of slow configuration APIs +* Situations where the integration tests cannot be run as part of the main Ansible + continuous integration running in Shippable. + + + +Providing quick feedback +------------------------ + +Example: + A single step of the rds_instance test cases can take up to 20 + minutes (the time to create an RDS instance in Amazon). The entire + test run can last for well over an hour. All 16 of the unit tests + complete execution in less than 2 seconds. + +The time saving provided by being able to run the code in a unit test makes it worth +creating a unit test when bug fixing a module, even if those tests do not often identify +problems later. 
As a basic goal, every module should have at least one unit test which +will give quick feedback in easy cases without having to wait for the integration tests to +complete. + +Ensuring correct use of external interfaces +------------------------------------------- + +Unit tests can check the way in which external services are run to ensure that they match +specifications or are as efficient as possible *even when the final output will not be changed*. + +Example: + Package managers are often far more efficient when installing multiple packages at once + rather than each package separately. The final result is the + same: the packages are all installed, so the efficiency is difficult to verify through + integration tests. By providing a mock package manager and verifying that it is called + once, we can build a valuable test for module efficiency. + +Another related use is in the situation where an API has versions which behave +differently. A programmer working on a new version may change the module to work with the +new API version and unintentionally break the old version. A test case +which checks that the call happens properly for the old version can help avoid the +problem. In this situation it is very important to include version numbering in the test case +name (see `Naming unit tests`_ below). + +Providing specific design tests +-------------------------------- + +By building a requirement for a particular part of the +code and then coding to that requirement, unit tests _can_ sometimes improve the code and +help future developers understand that code. + +Unit tests that test internal implementation details of code, on the other hand, almost +always do more harm than good. Testing that your packages to install are stored in a list +would slow down and confuse a future developer who might need to change that list into a +dictionary for efficiency. 
This problem can be reduced somewhat with clear test naming so +that the future developer immediately knows to delete the test case, but it is often +better to simply leave out the test case altogether and test for a real valuable feature +of the code, such as installing all of the packages supplied as arguments to the module. + + +How to unit test Ansible modules +================================ + +There are a number of techniques for unit testing modules. Beware that most +modules without unit tests are structured in a way that makes testing quite difficult and +can lead to very complicated tests which need more work than the code. Effectively using unit +tests may lead you to restructure your code. This is often a good thing and leads +to better code overall. Good restructuring can make your code clearer and easier to understand. + + +Naming unit tests +----------------- + +Unit tests should have logical names. If a developer working on the module being tested +breaks the test case, it should be easy to figure what the unit test covers from the name. +If a unit test is designed to verify compatibility with a specific software or API version +then include the version in the name of the unit test. + +As an example, ``test_v2_state_present_should_call_create_server_with_name()`` would be a +good name, ``test_create_server()`` would not be. + + +Use of Mocks +------------ + +Mock objects (from https://docs.python.org/3/library/unittest.mock.html) can be very +useful in building unit tests for special / difficult cases, but they can also +lead to complex and confusing coding situations. One good use for mocks would be in +simulating an API. As for 'six', the 'mock' python package is bundled with Ansible (use +``import units.compat.mock``). + +Ensuring failure cases are visible with mock objects +---------------------------------------------------- + +Functions like :meth:`module.fail_json` are normally expected to terminate execution. 
When you +run with a mock module object this doesn't happen since the mock always returns another mock +from a function call. You can set up the mock to raise an exception as shown above, or you can +assert that these functions have not been called in each test. For example:: + + module = MagicMock() + function_to_test(module, argument) + module.fail_json.assert_not_called() + +This applies not only to calling the main module but almost any other +function in a module which gets the module object. + + +Mocking of the actual module +---------------------------- + +The setup of an actual module is quite complex (see `Passing Arguments`_ below) and often +isn't needed for most functions which use a module. Instead you can use a mock object as +the module and create any module attributes needed by the function you are testing. If +you do this, beware that the module exit functions need special handling as mentioned +above, either by throwing an exception or ensuring that they haven't been called. For example:: + + class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + pass + + # you may also do the same to fail json + module = MagicMock() + module.exit_json.side_effect = AnsibleExitJson(Exception) + with self.assertRaises(AnsibleExitJson) as result: + return = my_module.test_this_function(module, argument) + module.fail_json.assert_not_called() + assert return["changed"] == True + +API definition with unit test cases +----------------------------------- + +API interaction is usually best tested with the function tests defined in Ansible's +integration testing section, which run against the actual API. There are several cases +where the unit tests are likely to work better. 
+ +Defining a module against an API specification +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This case is especially important for modules interacting with web services, which provide +an API that Ansible uses but which are beyond the control of the user. + +By writing a custom emulation of the calls that return data from the API, we can ensure +that only the features which are clearly defined in the specification of the API are +present in the message. This means that we can check that we use the correct +parameters and nothing else. + + +*Example: in rds_instance unit tests a simple instance state is defined*:: + + def simple_instance_list(status, pending): + return {u'DBInstances': [{u'DBInstanceArn': 'arn:aws:rds:us-east-1:1234567890:db:fakedb', + u'DBInstanceStatus': status, + u'PendingModifiedValues': pending, + u'DBInstanceIdentifier': 'fakedb'}]} + +This is then used to create a list of states:: + + rds_client_double = MagicMock() + rds_client_double.describe_db_instances.side_effect = [ + simple_instance_list('rebooting', {"a": "b", "c": "d"}), + simple_instance_list('available', {"c": "d", "e": "f"}), + simple_instance_list('rebooting', {"a": "b"}), + simple_instance_list('rebooting', {"e": "f", "g": "h"}), + simple_instance_list('rebooting', {}), + simple_instance_list('available', {"g": "h", "i": "j"}), + simple_instance_list('rebooting', {"i": "j", "k": "l"}), + simple_instance_list('available', {}), + simple_instance_list('available', {}), + ] + +These states are then used as returns from a mock object to ensure that the ``await`` function +waits through all of the states that would mean the RDS instance has not yet completed +configuration:: + + rds_i.await_resource(rds_client_double, "some-instance", "available", mod_mock, + await_pending=1) + assert(len(sleeper_double.mock_calls) > 5), "await_pending didn't wait enough" + +By doing this we check that the ``await`` function will keep waiting through +potentially unusual that it would be 
impossible to reliably trigger through the +integration tests but which happen unpredictably in reality. + +Defining a module to work against multiple API versions +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This case is especially important for modules interacting with many different versions of +software; for example, package installation modules that might be expected to work with +many different operating system versions. + +By using previously stored data from various versions of an API we can ensure that the +code is tested against the actual data which will be sent from that version of the system +even when the version is very obscure and unlikely to be available during testing. + +Ansible special cases for unit testing +====================================== + +There are a number of special cases for unit testing the environment of an Ansible module. +The most common are documented below, and suggestions for others can be found by looking +at the source code of the existing unit tests or asking on the Ansible IRC channel or mailing +lists. + +Module argument processing +-------------------------- + +There are two problems with running the main function of a module: + +* Since the module is supposed to accept arguments on ``STDIN`` it is a bit difficult to + set up the arguments correctly so that the module will get them as parameters. +* All modules should finish by calling either the :meth:`module.fail_json` or + :meth:`module.exit_json`, but these won't work correctly in a testing environment. + +Passing Arguments +----------------- + +.. This section should be updated once https://github.com/ansible/ansible/pull/31456 is + closed since the function below will be provided in a library file. + +To pass arguments to a module correctly, use the ``set_module_args`` method which accepts a dictionary +as its parameter. 
Module creation and argument processing is +handled through the :class:`AnsibleModule` object in the basic section of the utilities. Normally +this accepts input on ``STDIN``, which is not convenient for unit testing. When the special +variable is set it will be treated as if the input came on ``STDIN`` to the module. Simply call that function before setting up your module:: + + import json + from units.modules.utils import set_module_args + from ansible.module_utils._text import to_bytes + + def test_already_registered(self): + set_module_args({ + 'activationkey': 'key', + 'username': 'user', + 'password': 'pass', + }) + +Handling exit correctly +----------------------- + +.. This section should be updated once https://github.com/ansible/ansible/pull/31456 is + closed since the exit and failure functions below will be provided in a library file. + +The :meth:`module.exit_json` function won't work properly in a testing environment since it +writes error information to ``STDOUT`` upon exit, where it +is difficult to examine. This can be mitigated by replacing it (and :meth:`module.fail_json`) with +a function that raises an exception:: + + def exit_json(*args, **kwargs): + if 'changed' not in kwargs: + kwargs['changed'] = False + raise AnsibleExitJson(kwargs) + +Now you can ensure that the first function called is the one you expected simply by +testing for the correct exception:: + + def test_returned_value(self): + set_module_args({ + 'activationkey': 'key', + 'username': 'user', + 'password': 'pass', + }) + + with self.assertRaises(AnsibleExitJson) as result: + my_module.main() + +The same technique can be used to replace :meth:`module.fail_json` (which is used for failure +returns from modules) and for the ``aws_module.fail_json_aws()`` (used in modules for Amazon +Web Services). 
+ +Running the main function +------------------------- + +If you do want to run the actual main function of a module you must import the module, set +the arguments as above, set up the appropriate exit exception and then run the module:: + + # This test is based around pytest's features for individual test functions + import pytest + import ansible.modules.module.group.my_module as my_module + + def test_main_function(monkeypatch): + monkeypatch.setattr(my_module.AnsibleModule, "exit_json", fake_exit_json) + set_module_args({ + 'activationkey': 'key', + 'username': 'user', + 'password': 'pass', + }) + my_module.main() + + +Handling calls to external executables +-------------------------------------- + +Module must use :meth:`AnsibleModule.run_command` in order to execute an external command. This +method needs to be mocked: + +Here is a simple mock of :meth:`AnsibleModule.run_command` (taken from :file:`test/units/modules/packaging/os/test_rhn_register.py`):: + + with patch.object(basic.AnsibleModule, 'run_command') as run_command: + run_command.return_value = 0, '', '' # successful execution, no output + with self.assertRaises(AnsibleExitJson) as result: + self.module.main() + self.assertFalse(result.exception.args[0]['changed']) + # Check that run_command has been called + run_command.assert_called_once_with('/usr/bin/command args') + self.assertEqual(run_command.call_count, 1) + self.assertFalse(run_command.called) + + +A Complete Example +------------------ + +The following example is a complete skeleton that reuses the mocks explained above and adds a new +mock for :meth:`Ansible.get_bin_path`:: + + import json + + from units.compat import unittest + from units.compat.mock import patch + from ansible.module_utils import basic + from ansible.module_utils._text import to_bytes + from ansible.modules.namespace import my_module + + + def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = 
json.dumps({'ANSIBLE_MODULE_ARGS': args}) + basic._ANSIBLE_ARGS = to_bytes(args) + + + class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + pass + + + class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + pass + + + def exit_json(*args, **kwargs): + """function to patch over exit_json; package return data into an exception""" + if 'changed' not in kwargs: + kwargs['changed'] = False + raise AnsibleExitJson(kwargs) + + + def fail_json(*args, **kwargs): + """function to patch over fail_json; package return data into an exception""" + kwargs['failed'] = True + raise AnsibleFailJson(kwargs) + + + def get_bin_path(self, arg, required=False): + """Mock AnsibleModule.get_bin_path""" + if arg.endswith('my_command'): + return '/usr/bin/my_command' + else: + if required: + fail_json(msg='%r not found !' % arg) + + + class TestMyModule(unittest.TestCase): + + def setUp(self): + self.mock_module_helper = patch.multiple(basic.AnsibleModule, + exit_json=exit_json, + fail_json=fail_json, + get_bin_path=get_bin_path) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def test_module_fail_when_required_args_missing(self): + with self.assertRaises(AnsibleFailJson): + set_module_args({}) + self.module.main() + + + def test_ensure_command_called(self): + set_module_args({ + 'param1': 10, + 'param2': 'test', + }) + + with patch.object(basic.AnsibleModule, 'run_command') as mock_run_command: + stdout = 'configuration updated' + stderr = '' + rc = 0 + mock_run_command.return_value = rc, stdout, stderr # successful execution + + with self.assertRaises(AnsibleExitJson) as result: + my_module.main() + self.assertFalse(result.exception.args[0]['changed']) # ensure result is changed + + mock_run_command.assert_called_once_with('/usr/bin/my_command --value 10 --name test') + + +Restructuring modules to enable testing module 
set up and other processes +------------------------------------------------------------------------- + +Often modules have a ``main()`` function which sets up the module and then performs other +actions. This can make it difficult to check argument processing. This can be made easier by +moving module configuration and initialization into a separate function. For example:: + + argument_spec = dict( + # module function variables + state=dict(choices=['absent', 'present', 'rebooted', 'restarted'], default='present'), + apply_immediately=dict(type='bool', default=False), + wait=dict(type='bool', default=False), + wait_timeout=dict(type='int', default=600), + allocated_storage=dict(type='int', aliases=['size']), + db_instance_identifier=dict(aliases=["id"], required=True), + ) + + def setup_module_object(): + module = AnsibleAWSModule( + argument_spec=argument_spec, + required_if=required_if, + mutually_exclusive=[['old_instance_id', 'source_db_instance_identifier', + 'db_snapshot_identifier']], + ) + return module + + def main(): + module = setup_module_object() + validate_parameters(module) + conn = setup_client(module) + return_dict = run_task(module, conn) + module.exit_json(**return_dict) + +This now makes it possible to run tests against the module initiation function:: + + def test_rds_module_setup_fails_if_db_instance_identifier_parameter_missing(): + # db_instance_identifier parameter is missing + set_module_args({ + 'state': 'absent', + 'apply_immediately': 'True', + }) + + with self.assertRaises(AnsibleFailJson) as result: + self.module.setup_json + +See also ``test/units/module_utils/aws/test_rds.py`` + +Note that the ``argument_spec`` dictionary is visible in a module variable. This has +advantages, both in allowing explicit testing of the arguments and in allowing the easy +creation of module objects for testing. 
+ +The same restructuring technique can be valuable for testing other functionality, such as the part of the module which queries the object that the module configures. + +Traps for maintaining Python 2 compatibility +============================================ + +If you use the ``mock`` library from the Python 2.6 standard library, a number of the +assert functions are missing but will return as if successful. This means that test cases should take great care *not* to use +functions marked as *new* in the Python 3 documentation, since the tests will likely always +succeed even if the code is broken when run on older versions of Python. + +A helpful development approach to this should be to ensure that all of the tests have been +run under Python 2.6 and that each assertion in the test cases has been checked to work by breaking +the code in Ansible to trigger that failure. + +.. warning:: Maintain Python 2.6 compatibility + + Please remember that modules need to maintain compatibility with Python 2.6 so the unittests for + modules should also be compatible with Python 2.6. + + +.. seealso:: + + :ref:`testing_units` + Ansible unit tests documentation + :ref:`testing_running_locally` + Running tests locally including gathering and reporting coverage data + :ref:`developing_modules_general` + Get started developing a module + `Python 3 documentation - 26.4. unittest — Unit testing framework `_ + The documentation of the unittest framework in python 3 + `Python 2 documentation - 25.3. unittest — Unit testing framework `_ + The documentation of the earliest supported unittest framework - from Python 2.6 + `pytest: helps you write better programs `_ + The documentation of pytest - the framework actually used to run Ansible unit tests + `Development Mailing List `_ + Mailing list for development topics + `Testing Your Code (from The Hitchhiker's Guide to Python!)
`_ + General advice on testing Python code + `Uncle Bob's many videos on YouTube `_ + Unit testing is a part of the various philosophies of software development, including + Extreme Programming (XP), Clean Coding. Uncle Bob talks through how to benefit from this + `"Why Most Unit Testing is Waste" `_ + An article warning against the costs of unit testing + `'A Response to "Why Most Unit Testing is Waste"' `_ + A response pointing to how to maintain the value of unit tests diff --git a/docs/docsite/rst/dev_guide/testing_validate-modules.rst b/docs/docsite/rst/dev_guide/testing_validate-modules.rst new file mode 100644 index 00000000..044a2c29 --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing_validate-modules.rst @@ -0,0 +1,165 @@ +:orphan: + +.. _testing_validate-modules: + +**************** +validate-modules +**************** + +.. contents:: Topics + +Python program to help test or validate Ansible modules. + +``validate-modules`` is one of the ``ansible-test`` Sanity Tests, see :ref:`testing_sanity` for more information. + +Originally developed by Matt Martz (@sivel) + + +Usage +===== + +.. code:: shell + + cd /path/to/ansible/source + source hacking/env-setup + ansible-test sanity --test validate-modules + +Help +==== + +.. code:: shell + + usage: validate-modules [-h] [-w] [--exclude EXCLUDE] [--arg-spec] + [--base-branch BASE_BRANCH] [--format {json,plain}] + [--output OUTPUT] + modules [modules ...] + + positional arguments: + modules Path to module or module directory + + optional arguments: + -h, --help show this help message and exit + -w, --warnings Show warnings + --exclude EXCLUDE RegEx exclusion pattern + --arg-spec Analyze module argument spec + --base-branch BASE_BRANCH + Used in determining if new options were added + --format {json,plain} + Output format. Default: "plain" + --output OUTPUT Output location, use "-" for stdout.
Default "-" + + +Extending validate-modules +========================== + +The ``validate-modules`` tool has a `schema.py `_ that is used to validate the YAML blocks, such as ``DOCUMENTATION`` and ``RETURNS``. + + +Codes +===== + +============================================================ ================== ==================== ========================================================================================= + **Error Code** **Type** **Level** **Sample Message** +------------------------------------------------------------ ------------------ -------------------- ----------------------------------------------------------------------------------------- + ansible-deprecated-version Documentation Error A feature is deprecated and supposed to be removed in the current or an earlier Ansible version + ansible-invalid-version Documentation Error The Ansible version at which a feature is supposed to be removed cannot be parsed + ansible-module-not-initialized Syntax Error Execution of the module did not result in initialization of AnsibleModule + collection-deprecated-version Documentation Error A feature is deprecated and supposed to be removed in the current or an earlier collection version + collection-invalid-version Documentation Error The collection version at which a feature is supposed to be removed cannot be parsed (it must be a semantic version, see https://semver.org/) + deprecated-date Documentation Error A date before today appears as ``removed_at_date`` or in ``deprecated_aliases`` + deprecation-mismatch Documentation Error Module marked as deprecated or removed in at least one of the filename, its metadata, or in DOCUMENTATION (setting DOCUMENTATION.deprecated for deprecation or removing all Documentation for removed) but not in all three places. 
+ doc-choices-do-not-match-spec Documentation Error Value for "choices" from the argument_spec does not match the documentation + doc-choices-incompatible-type Documentation Error Choices value from the documentation is not compatible with type defined in the argument_spec + doc-default-does-not-match-spec Documentation Error Value for "default" from the argument_spec does not match the documentation + doc-default-incompatible-type Documentation Error Default value from the documentation is not compatible with type defined in the argument_spec + doc-elements-invalid Documentation Error Documentation specifies elements for argument, when "type" is not ``list``. + doc-elements-mismatch Documentation Error Argument_spec defines elements different than documentation does + doc-missing-type Documentation Error Documentation doesn't specify a type but argument in ``argument_spec`` use default type (``str``) + doc-required-mismatch Documentation Error argument in argument_spec is required but documentation says it is not, or vice versa + doc-type-does-not-match-spec Documentation Error Argument_spec defines type different than documentation does + documentation-error Documentation Error Unknown ``DOCUMENTATION`` error + documentation-syntax-error Documentation Error Invalid ``DOCUMENTATION`` schema + illegal-future-imports Imports Error Only the following ``from __future__`` imports are allowed: ``absolute_import``, ``division``, and ``print_function``. + import-before-documentation Imports Error Import found before documentation variables. 
All imports must appear below ``DOCUMENTATION``/``EXAMPLES``/``RETURN`` + import-error Documentation Error ``Exception`` attempting to import module for ``argument_spec`` introspection + import-placement Locations Warning Imports should be directly below ``DOCUMENTATION``/``EXAMPLES``/``RETURN`` + imports-improper-location Imports Error Imports should be directly below ``DOCUMENTATION``/``EXAMPLES``/``RETURN`` + incompatible-choices Documentation Error Choices value from the argument_spec is not compatible with type defined in the argument_spec + incompatible-default-type Documentation Error Default value from the argument_spec is not compatible with type defined in the argument_spec + invalid-argument-name Documentation Error Argument in argument_spec must not be one of 'message', 'syslog_facility' as it is used internally by Ansible Core Engine + invalid-argument-spec Documentation Error Argument in argument_spec must be a dictionary/hash when used + invalid-argument-spec-options Documentation Error Suboptions in argument_spec are invalid + invalid-documentation Documentation Error ``DOCUMENTATION`` is not valid YAML + invalid-documentation-options Documentation Error ``DOCUMENTATION.options`` must be a dictionary/hash when used + invalid-examples Documentation Error ``EXAMPLES`` is not valid YAML + invalid-extension Naming Error Official Ansible modules must have a ``.py`` extension for python modules or a ``.ps1`` for powershell modules + invalid-module-schema Documentation Error ``AnsibleModule`` schema validation error + invalid-requires-extension Naming Error Module ``#AnsibleRequires -CSharpUtil`` should not end in .cs, Module ``#Requires`` should not end in .psm1 + invalid-tagged-version Documentation Error All version numbers specified in code have to be explicitly tagged with the collection name, in other words, ``community.general:1.2.3`` or ``ansible.builtin:2.10`` + last-line-main-call Syntax Error Call to ``main()`` not the last line (or 
``removed_module()`` in the case of deprecated & docs only modules) + missing-doc-fragment Documentation Error ``DOCUMENTATION`` fragment missing + missing-existing-doc-fragment Documentation Warning Pre-existing ``DOCUMENTATION`` fragment missing + missing-documentation Documentation Error No ``DOCUMENTATION`` provided + missing-examples Documentation Error No ``EXAMPLES`` provided + missing-gplv3-license Documentation Error GPLv3 license header not found + missing-if-name-main Syntax Error Next to last line is not ``if __name__ == "__main__":`` + missing-main-call Syntax Error Did not find a call to ``main()`` (or ``removed_module()`` in the case of deprecated & docs only modules) + missing-module-utils-basic-import Imports Warning Did not find ``ansible.module_utils.basic`` import + missing-module-utils-import-csharp-requirements Imports Error No ``Ansible.ModuleUtils`` or C# Ansible util requirements/imports found + missing-powershell-interpreter Syntax Error Interpreter line is not ``#!powershell`` + missing-python-doc Naming Error Missing python documentation file + missing-python-interpreter Syntax Error Interpreter line is not ``#!/usr/bin/python`` + missing-return Documentation Error No ``RETURN`` documentation provided + missing-return-legacy Documentation Warning No ``RETURN`` documentation provided for legacy module + missing-suboption-docs Documentation Error Argument in argument_spec has sub-options but documentation does not define sub-options + module-incorrect-version-added Documentation Error Module level ``version_added`` is incorrect + module-invalid-version-added Documentation Error Module level ``version_added`` is not a valid version number + module-utils-specific-import Imports Error ``module_utils`` imports should import specific components, not ``*`` + multiple-utils-per-requires Imports Error ``Ansible.ModuleUtils`` requirements do not support multiple modules per statement + multiple-csharp-utils-per-requires Imports Error Ansible C# 
util requirements do not support multiple utils per statement + no-default-for-required-parameter Documentation Error Option is marked as required but specifies a default. Arguments with a default should not be marked as required + nonexistent-parameter-documented Documentation Error Argument is listed in DOCUMENTATION.options, but not accepted by the module + option-incorrect-version-added Documentation Error ``version_added`` for new option is incorrect + option-invalid-version-added Documentation Error ``version_added`` for option is not a valid version number + parameter-invalid Documentation Error Argument in argument_spec is not a valid python identifier + parameter-invalid-elements Documentation Error Value for "elements" is valid only when value of "type" is ``list`` + implied-parameter-type-mismatch Documentation Error Argument_spec implies ``type="str"`` but documentation defines it as different data type + parameter-type-not-in-doc Documentation Error Type value is defined in ``argument_spec`` but documentation doesn't specify a type + parameter-alias-repeated Parameters Error argument in argument_spec has at least one alias specified multiple times in aliases + parameter-alias-self Parameters Error argument in argument_spec is specified as its own alias + parameter-documented-multiple-times Documentation Error argument in argument_spec with aliases is documented multiple times + parameter-list-no-elements Parameters Error argument in argument_spec "type" is specified as ``list`` without defining "elements" + parameter-state-invalid-choice Parameters Error Argument ``state`` includes ``get``, ``list`` or ``info`` as a choice. Functionality should be in an ``_info`` or (if further conditions apply) ``_facts`` module. 
+ python-syntax-error Syntax Error Python ``SyntaxError`` while parsing module + return-syntax-error Documentation Error ``RETURN`` is not valid YAML, ``RETURN`` fragments missing or invalid + return-invalid-version-added Documentation Error ``version_added`` for return value is not a valid version number + subdirectory-missing-init Naming Error Ansible module subdirectories must contain an ``__init__.py`` + try-except-missing-has Imports Warning Try/Except ``HAS_`` expression missing + undocumented-parameter Documentation Error Argument is listed in the argument_spec, but not documented in the module + unidiomatic-typecheck Syntax Error Type comparison using ``type()`` found. Use ``isinstance()`` instead + unknown-doc-fragment Documentation Warning Unknown pre-existing ``DOCUMENTATION`` error + use-boto3 Imports Error ``boto`` import found, new modules should use ``boto3`` + use-fail-json-not-sys-exit Imports Error ``sys.exit()`` call found. Should be ``exit_json``/``fail_json`` + use-module-utils-urls Imports Error ``requests`` import found, should use ``ansible.module_utils.urls`` instead + use-run-command-not-os-call Imports Error ``os.call`` used instead of ``module.run_command`` + use-run-command-not-popen Imports Error ``subprocess.Popen`` used instead of ``module.run_command`` + use-short-gplv3-license Documentation Error GPLv3 license header should be the :ref:`short form ` for new modules + mutually_exclusive-type Documentation Error mutually_exclusive entry contains non-string value + mutually_exclusive-collision Documentation Error mutually_exclusive entry has repeated terms + mutually_exclusive-unknown Documentation Error mutually_exclusive entry contains option which does not appear in argument_spec (potentially an alias of an option?) 
+ required_one_of-type Documentation Error required_one_of entry contains non-string value + required_one_of-collision Documentation Error required_one_of entry has repeated terms + required_one_of-unknown Documentation Error required_one_of entry contains option which does not appear in argument_spec (potentially an alias of an option?) + required_together-type Documentation Error required_together entry contains non-string value + required_together-collision Documentation Error required_together entry has repeated terms + required_together-unknown Documentation Error required_together entry contains option which does not appear in argument_spec (potentially an alias of an option?) + required_if-is_one_of-type Documentation Error required_if entry has a fourth value which is not a bool + required_if-requirements-type Documentation Error required_if entry has a third value (requirements) which is not a list or tuple + required_if-requirements-collision Documentation Error required_if entry has repeated terms in requirements + required_if-requirements-unknown Documentation Error required_if entry's requirements contains option which does not appear in argument_spec (potentially an alias of an option?) + required_if-unknown-key Documentation Error required_if entry's key does not appear in argument_spec (potentially an alias of an option?) + required_if-key-in-requirements Documentation Error required_if entry contains its key in requirements list/tuple + required_if-value-type Documentation Error required_if entry's value is not of the type specified for its key + required_by-collision Documentation Error required_by entry has repeated terms + required_by-unknown Documentation Error required_by entry contains option which does not appear in argument_spec (potentially an alias of an option?) 
+============================================================ ================== ==================== ========================================================================================= -- cgit v1.2.3