diff options
Diffstat (limited to 'lib/ansible/module_utils/common')
21 files changed, 3535 insertions, 0 deletions
diff --git a/lib/ansible/module_utils/common/__init__.py b/lib/ansible/module_utils/common/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/lib/ansible/module_utils/common/__init__.py diff --git a/lib/ansible/module_utils/common/_collections_compat.py b/lib/ansible/module_utils/common/_collections_compat.py new file mode 100644 index 0000000..3412408 --- /dev/null +++ b/lib/ansible/module_utils/common/_collections_compat.py @@ -0,0 +1,46 @@ +# Copyright (c), Sviatoslav Sydorenko <ssydoren@redhat.com> 2018 +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) +"""Collections ABC import shim. + +This module is intended only for internal use. +It will go away once the bundled copy of six includes equivalent functionality. +Third parties should not use this. +""" + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +try: + """Python 3.3+ branch.""" + from collections.abc import ( + MappingView, + ItemsView, + KeysView, + ValuesView, + Mapping, MutableMapping, + Sequence, MutableSequence, + Set, MutableSet, + Container, + Hashable, + Sized, + Callable, + Iterable, + Iterator, + ) +except ImportError: + """Use old lib location under 2.6-3.2.""" + from collections import ( # type: ignore[no-redef,attr-defined] # pylint: disable=deprecated-class + MappingView, + ItemsView, + KeysView, + ValuesView, + Mapping, MutableMapping, + Sequence, MutableSequence, + Set, MutableSet, + Container, + Hashable, + Sized, + Callable, + Iterable, + Iterator, + ) diff --git a/lib/ansible/module_utils/common/_json_compat.py b/lib/ansible/module_utils/common/_json_compat.py new file mode 100644 index 0000000..787af0f --- /dev/null +++ b/lib/ansible/module_utils/common/_json_compat.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019 Ansible Project +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +from 
__future__ import absolute_import, division, print_function +__metaclass__ = type + +import types +import json + +# Detect the python-json library which is incompatible +try: + if not isinstance(json.loads, types.FunctionType) or not isinstance(json.dumps, types.FunctionType): + raise ImportError('json.loads or json.dumps were not found in the imported json library.') +except AttributeError: + raise ImportError('python-json was detected, which is incompatible.') diff --git a/lib/ansible/module_utils/common/_utils.py b/lib/ansible/module_utils/common/_utils.py new file mode 100644 index 0000000..66df316 --- /dev/null +++ b/lib/ansible/module_utils/common/_utils.py @@ -0,0 +1,40 @@ +# Copyright (c) 2018, Ansible Project +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + + +""" +Modules in _utils are waiting to find a better home. If you need to use them, be prepared for them +to move to a different location in the future. +""" + + +def get_all_subclasses(cls): + ''' + Recursively search and find all subclasses of a given class + + :arg cls: A python class + :rtype: set + :returns: The set of python classes which are the subclasses of `cls`. + + In python, you can use a class's :py:meth:`__subclasses__` method to determine what subclasses + of a class exist. However, `__subclasses__` only goes one level deep. This function searches + each child class's `__subclasses__` method to find all of the descendent classes. It then + returns an iterable of the descendent classes. 
+ ''' + # Retrieve direct subclasses + subclasses = set(cls.__subclasses__()) + to_visit = list(subclasses) + # Then visit all subclasses + while to_visit: + for sc in to_visit: + # The current class is now visited, so remove it from list + to_visit.remove(sc) + # Appending all subclasses to visit and keep a reference of available class + for ssc in sc.__subclasses__(): + if ssc not in subclasses: + to_visit.append(ssc) + subclasses.add(ssc) + return subclasses diff --git a/lib/ansible/module_utils/common/arg_spec.py b/lib/ansible/module_utils/common/arg_spec.py new file mode 100644 index 0000000..d9f716e --- /dev/null +++ b/lib/ansible/module_utils/common/arg_spec.py @@ -0,0 +1,311 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Ansible Project +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from copy import deepcopy + +from ansible.module_utils.common.parameters import ( + _ADDITIONAL_CHECKS, + _get_legal_inputs, + _get_unsupported_parameters, + _handle_aliases, + _list_deprecations, + _list_no_log_values, + _set_defaults, + _validate_argument_types, + _validate_argument_values, + _validate_sub_spec, + set_fallbacks, +) + +from ansible.module_utils.common.text.converters import to_native +from ansible.module_utils.common.warnings import deprecate, warn + +from ansible.module_utils.common.validation import ( + check_mutually_exclusive, + check_required_arguments, +) + +from ansible.module_utils.errors import ( + AliasError, + AnsibleValidationErrorMultiple, + DeprecationError, + MutuallyExclusiveError, + NoLogError, + RequiredDefaultError, + RequiredError, + UnsupportedError, +) + + +class ValidationResult: + """Result of argument spec validation. 
+ + This is the object returned by :func:`ArgumentSpecValidator.validate() + <ansible.module_utils.common.arg_spec.ArgumentSpecValidator.validate()>` + containing the validated parameters and any errors. + """ + + def __init__(self, parameters): + """ + :arg parameters: Terms to be validated and coerced to the correct type. + :type parameters: dict + """ + self._no_log_values = set() + """:class:`set` of values marked as ``no_log`` in the argument spec. This + is a temporary holding place for these values and may move in the future. + """ + + self._unsupported_parameters = set() + self._supported_parameters = dict() + self._validated_parameters = deepcopy(parameters) + self._deprecations = [] + self._warnings = [] + self._aliases = {} + self.errors = AnsibleValidationErrorMultiple() + """ + :class:`~ansible.module_utils.errors.AnsibleValidationErrorMultiple` containing all + :class:`~ansible.module_utils.errors.AnsibleValidationError` objects if there were + any failures during validation. + """ + + @property + def validated_parameters(self): + """Validated and coerced parameters.""" + return self._validated_parameters + + @property + def unsupported_parameters(self): + """:class:`set` of unsupported parameter names.""" + return self._unsupported_parameters + + @property + def error_messages(self): + """:class:`list` of all error messages from each exception in :attr:`errors`.""" + return self.errors.messages + + +class ArgumentSpecValidator: + """Argument spec validation class + + Creates a validator based on the ``argument_spec`` that can be used to + validate a number of parameters using the :meth:`validate` method. + """ + + def __init__(self, argument_spec, + mutually_exclusive=None, + required_together=None, + required_one_of=None, + required_if=None, + required_by=None, + ): + + """ + :arg argument_spec: Specification of valid parameters and their type. May + include nested argument specs. 
+ :type argument_spec: dict[str, dict] + + :kwarg mutually_exclusive: List or list of lists of terms that should not + be provided together. + :type mutually_exclusive: list[str] or list[list[str]] + + :kwarg required_together: List of lists of terms that are required together. + :type required_together: list[list[str]] + + :kwarg required_one_of: List of lists of terms, one of which in each list + is required. + :type required_one_of: list[list[str]] + + :kwarg required_if: List of lists of ``[parameter, value, [parameters]]`` where + one of ``[parameters]`` is required if ``parameter == value``. + :type required_if: list + + :kwarg required_by: Dictionary of parameter names that contain a list of + parameters required by each key in the dictionary. + :type required_by: dict[str, list[str]] + """ + + self._mutually_exclusive = mutually_exclusive + self._required_together = required_together + self._required_one_of = required_one_of + self._required_if = required_if + self._required_by = required_by + self._valid_parameter_names = set() + self.argument_spec = argument_spec + + for key in sorted(self.argument_spec.keys()): + aliases = self.argument_spec[key].get('aliases') + if aliases: + self._valid_parameter_names.update(["{key} ({aliases})".format(key=key, aliases=", ".join(sorted(aliases)))]) + else: + self._valid_parameter_names.update([key]) + + def validate(self, parameters, *args, **kwargs): + """Validate ``parameters`` against argument spec. + + Error messages in the :class:`ValidationResult` may contain no_log values and should be + sanitized with :func:`~ansible.module_utils.common.parameters.sanitize_keys` before logging or displaying. + + :arg parameters: Parameters to validate against the argument spec + :type parameters: dict[str, dict] + + :return: :class:`ValidationResult` containing validated parameters. + + :Simple Example: + + .. 
code-block:: text + + argument_spec = { + 'name': {'type': 'str'}, + 'age': {'type': 'int'}, + } + + parameters = { + 'name': 'bo', + 'age': '42', + } + + validator = ArgumentSpecValidator(argument_spec) + result = validator.validate(parameters) + + if result.error_messages: + sys.exit("Validation failed: {0}".format(", ".join(result.error_messages))) + + valid_params = result.validated_parameters + """ + + result = ValidationResult(parameters) + + result._no_log_values.update(set_fallbacks(self.argument_spec, result._validated_parameters)) + + alias_warnings = [] + alias_deprecations = [] + try: + result._aliases.update(_handle_aliases(self.argument_spec, result._validated_parameters, alias_warnings, alias_deprecations)) + except (TypeError, ValueError) as e: + result.errors.append(AliasError(to_native(e))) + + legal_inputs = _get_legal_inputs(self.argument_spec, result._validated_parameters, result._aliases) + + for option, alias in alias_warnings: + result._warnings.append({'option': option, 'alias': alias}) + + for deprecation in alias_deprecations: + result._deprecations.append({ + 'msg': "Alias '%s' is deprecated. 
See the module docs for more information" % deprecation['name'], + 'version': deprecation.get('version'), + 'date': deprecation.get('date'), + 'collection_name': deprecation.get('collection_name'), + }) + + try: + result._no_log_values.update(_list_no_log_values(self.argument_spec, result._validated_parameters)) + except TypeError as te: + result.errors.append(NoLogError(to_native(te))) + + try: + result._deprecations.extend(_list_deprecations(self.argument_spec, result._validated_parameters)) + except TypeError as te: + result.errors.append(DeprecationError(to_native(te))) + + try: + result._unsupported_parameters.update( + _get_unsupported_parameters( + self.argument_spec, + result._validated_parameters, + legal_inputs, + store_supported=result._supported_parameters, + ) + ) + except TypeError as te: + result.errors.append(RequiredDefaultError(to_native(te))) + except ValueError as ve: + result.errors.append(AliasError(to_native(ve))) + + try: + check_mutually_exclusive(self._mutually_exclusive, result._validated_parameters) + except TypeError as te: + result.errors.append(MutuallyExclusiveError(to_native(te))) + + result._no_log_values.update(_set_defaults(self.argument_spec, result._validated_parameters, False)) + + try: + check_required_arguments(self.argument_spec, result._validated_parameters) + except TypeError as e: + result.errors.append(RequiredError(to_native(e))) + + _validate_argument_types(self.argument_spec, result._validated_parameters, errors=result.errors) + _validate_argument_values(self.argument_spec, result._validated_parameters, errors=result.errors) + + for check in _ADDITIONAL_CHECKS: + try: + check['func'](getattr(self, "_{attr}".format(attr=check['attr'])), result._validated_parameters) + except TypeError as te: + result.errors.append(check['err'](to_native(te))) + + result._no_log_values.update(_set_defaults(self.argument_spec, result._validated_parameters)) + + alias_deprecations = [] + _validate_sub_spec(self.argument_spec, 
result._validated_parameters, + errors=result.errors, + no_log_values=result._no_log_values, + unsupported_parameters=result._unsupported_parameters, + supported_parameters=result._supported_parameters, + alias_deprecations=alias_deprecations,) + for deprecation in alias_deprecations: + result._deprecations.append({ + 'msg': "Alias '%s' is deprecated. See the module docs for more information" % deprecation['name'], + 'version': deprecation.get('version'), + 'date': deprecation.get('date'), + 'collection_name': deprecation.get('collection_name'), + }) + + if result._unsupported_parameters: + flattened_names = [] + for item in result._unsupported_parameters: + if isinstance(item, tuple): + flattened_names.append(".".join(item)) + else: + flattened_names.append(item) + + unsupported_string = ", ".join(sorted(list(flattened_names))) + supported_params = supported_aliases = [] + if result._supported_parameters.get(item): + supported_params = sorted(list(result._supported_parameters[item][0])) + supported_aliases = sorted(list(result._supported_parameters[item][1])) + supported_string = ", ".join(supported_params) + if supported_aliases: + aliases_string = ", ".join(supported_aliases) + supported_string += " (%s)" % aliases_string + + msg = "{0}. Supported parameters include: {1}.".format(unsupported_string, supported_string) + result.errors.append(UnsupportedError(msg)) + + return result + + +class ModuleArgumentSpecValidator(ArgumentSpecValidator): + """Argument spec validation class used by :class:`AnsibleModule`. + + This is not meant to be used outside of :class:`AnsibleModule`. Use + :class:`ArgumentSpecValidator` instead. 
+ """ + + def __init__(self, *args, **kwargs): + super(ModuleArgumentSpecValidator, self).__init__(*args, **kwargs) + + def validate(self, parameters): + result = super(ModuleArgumentSpecValidator, self).validate(parameters) + + for d in result._deprecations: + deprecate(d['msg'], + version=d.get('version'), date=d.get('date'), + collection_name=d.get('collection_name')) + + for w in result._warnings: + warn('Both option {option} and its alias {alias} are set.'.format(option=w['option'], alias=w['alias'])) + + return result diff --git a/lib/ansible/module_utils/common/collections.py b/lib/ansible/module_utils/common/collections.py new file mode 100644 index 0000000..fdb9108 --- /dev/null +++ b/lib/ansible/module_utils/common/collections.py @@ -0,0 +1,112 @@ +# Copyright: (c) 2018, Sviatoslav Sydorenko <ssydoren@redhat.com> +# Copyright: (c) 2018, Ansible Project +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) +"""Collection of low-level utility functions.""" + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + + +from ansible.module_utils.six import binary_type, text_type +from ansible.module_utils.common._collections_compat import Hashable, Mapping, MutableMapping, Sequence + + +class ImmutableDict(Hashable, Mapping): + """Dictionary that cannot be updated""" + def __init__(self, *args, **kwargs): + self._store = dict(*args, **kwargs) + + def __getitem__(self, key): + return self._store[key] + + def __iter__(self): + return self._store.__iter__() + + def __len__(self): + return self._store.__len__() + + def __hash__(self): + return hash(frozenset(self.items())) + + def __eq__(self, other): + try: + if self.__hash__() == hash(other): + return True + except TypeError: + pass + + return False + + def __repr__(self): + return 'ImmutableDict({0})'.format(repr(self._store)) + + def union(self, overriding_mapping): + """ + Create an ImmutableDict as a combination of the 
original and overriding_mapping + + :arg overriding_mapping: A Mapping of replacement and additional items + :return: A copy of the ImmutableDict with key-value pairs from the overriding_mapping added + + If any of the keys in overriding_mapping are already present in the original ImmutableDict, + the overriding_mapping item replaces the one in the original ImmutableDict. + """ + return ImmutableDict(self._store, **overriding_mapping) + + def difference(self, subtractive_iterable): + """ + Create an ImmutableDict as a combination of the original minus keys in subtractive_iterable + + :arg subtractive_iterable: Any iterable containing keys that should not be present in the + new ImmutableDict + :return: A copy of the ImmutableDict with keys from the subtractive_iterable removed + """ + remove_keys = frozenset(subtractive_iterable) + keys = (k for k in self._store.keys() if k not in remove_keys) + return ImmutableDict((k, self._store[k]) for k in keys) + + +def is_string(seq): + """Identify whether the input has a string-like type (including bytes).""" + # AnsibleVaultEncryptedUnicode inherits from Sequence, but is expected to be a string like object + return isinstance(seq, (text_type, binary_type)) or getattr(seq, '__ENCRYPTED__', False) + + +def is_iterable(seq, include_strings=False): + """Identify whether the input is an iterable.""" + if not include_strings and is_string(seq): + return False + + try: + iter(seq) + return True + except TypeError: + return False + + +def is_sequence(seq, include_strings=False): + """Identify whether the input is a sequence. + + Strings and bytes are not sequences here, + unless ``include_strings`` is ``True``. + + Non-indexable things are never of a sequence type. + """ + if not include_strings and is_string(seq): + return False + + return isinstance(seq, Sequence) + + +def count(seq): + """Returns a dictionary with the number of appearances of each element of the iterable. + + Resembles the collections.Counter class functionality. 
It is meant to be used when the + code is run on Python 2.6.* where collections.Counter is not available. It should be + deprecated and replaced when support for Python < 2.7 is dropped. + """ + if not is_iterable(seq): + raise Exception('Argument provided is not an iterable') + counters = dict() + for elem in seq: + counters[elem] = counters.get(elem, 0) + 1 + return counters diff --git a/lib/ansible/module_utils/common/dict_transformations.py b/lib/ansible/module_utils/common/dict_transformations.py new file mode 100644 index 0000000..ffd0645 --- /dev/null +++ b/lib/ansible/module_utils/common/dict_transformations.py @@ -0,0 +1,154 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2018, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + + +import re +from copy import deepcopy + +from ansible.module_utils.common._collections_compat import MutableMapping + + +def camel_dict_to_snake_dict(camel_dict, reversible=False, ignore_list=()): + """ + reversible allows two way conversion of a camelized dict + such that snake_dict_to_camel_dict(camel_dict_to_snake_dict(x)) == x + + This is achieved through mapping e.g. HTTPEndpoint to h_t_t_p_endpoint + where the default would be simply http_endpoint, which gets turned into + HttpEndpoint if recamelized. + + ignore_list is used to avoid converting a sub-tree of a dict. This is + particularly important for tags, where keys are case-sensitive. We convert + the 'Tags' key but nothing below. 
+ """ + + def value_is_list(camel_list): + + checked_list = [] + for item in camel_list: + if isinstance(item, dict): + checked_list.append(camel_dict_to_snake_dict(item, reversible)) + elif isinstance(item, list): + checked_list.append(value_is_list(item)) + else: + checked_list.append(item) + + return checked_list + + snake_dict = {} + for k, v in camel_dict.items(): + if isinstance(v, dict) and k not in ignore_list: + snake_dict[_camel_to_snake(k, reversible=reversible)] = camel_dict_to_snake_dict(v, reversible) + elif isinstance(v, list) and k not in ignore_list: + snake_dict[_camel_to_snake(k, reversible=reversible)] = value_is_list(v) + else: + snake_dict[_camel_to_snake(k, reversible=reversible)] = v + + return snake_dict + + +def snake_dict_to_camel_dict(snake_dict, capitalize_first=False): + """ + Perhaps unexpectedly, snake_dict_to_camel_dict returns dromedaryCase + rather than true CamelCase. Passing capitalize_first=True returns + CamelCase. The default remains False as that was the original implementation + """ + + def camelize(complex_type, capitalize_first=False): + if complex_type is None: + return + new_type = type(complex_type)() + if isinstance(complex_type, dict): + for key in complex_type: + new_type[_snake_to_camel(key, capitalize_first)] = camelize(complex_type[key], capitalize_first) + elif isinstance(complex_type, list): + for i in range(len(complex_type)): + new_type.append(camelize(complex_type[i], capitalize_first)) + else: + return complex_type + return new_type + + return camelize(snake_dict, capitalize_first) + + +def _snake_to_camel(snake, capitalize_first=False): + if capitalize_first: + return ''.join(x.capitalize() or '_' for x in snake.split('_')) + else: + return snake.split('_')[0] + ''.join(x.capitalize() or '_' for x in snake.split('_')[1:]) + + +def _camel_to_snake(name, reversible=False): + + def prepend_underscore_and_lower(m): + return '_' + m.group(0).lower() + + if reversible: + upper_pattern = r'[A-Z]' + else: + # Cope 
with pluralized abbreviations such as TargetGroupARNs + # that would otherwise be rendered target_group_ar_ns + upper_pattern = r'[A-Z]{3,}s$' + + s1 = re.sub(upper_pattern, prepend_underscore_and_lower, name) + # Handle when there was nothing before the plural_pattern + if s1.startswith("_") and not name.startswith("_"): + s1 = s1[1:] + if reversible: + return s1 + + # Remainder of solution seems to be https://stackoverflow.com/a/1176023 + first_cap_pattern = r'(.)([A-Z][a-z]+)' + all_cap_pattern = r'([a-z0-9])([A-Z]+)' + s2 = re.sub(first_cap_pattern, r'\1_\2', s1) + return re.sub(all_cap_pattern, r'\1_\2', s2).lower() + + +def dict_merge(a, b): + '''recursively merges dicts. not just simple a['key'] = b['key'], if + both a and b have a key whose value is a dict then dict_merge is called + on both values and the result stored in the returned dictionary.''' + if not isinstance(b, dict): + return b + result = deepcopy(a) + for k, v in b.items(): + if k in result and isinstance(result[k], dict): + result[k] = dict_merge(result[k], v) + else: + result[k] = deepcopy(v) + return result + + +def recursive_diff(dict1, dict2): + """Recursively diff two dictionaries + + Raises ``TypeError`` for incorrect argument type. + + :arg dict1: Dictionary to compare against. + :arg dict2: Dictionary to compare with ``dict1``. + :return: Tuple of dictionaries of differences or ``None`` if there are no differences. + """ + + if not all((isinstance(item, MutableMapping) for item in (dict1, dict2))): + raise TypeError("Unable to diff 'dict1' %s and 'dict2' %s. " + "Both must be a dictionary." 
% (type(dict1), type(dict2))) + + left = dict((k, v) for (k, v) in dict1.items() if k not in dict2) + right = dict((k, v) for (k, v) in dict2.items() if k not in dict1) + for k in (set(dict1.keys()) & set(dict2.keys())): + if isinstance(dict1[k], dict) and isinstance(dict2[k], dict): + result = recursive_diff(dict1[k], dict2[k]) + if result: + left[k] = result[0] + right[k] = result[1] + elif dict1[k] != dict2[k]: + left[k] = dict1[k] + right[k] = dict2[k] + if left or right: + return left, right + return None diff --git a/lib/ansible/module_utils/common/file.py b/lib/ansible/module_utils/common/file.py new file mode 100644 index 0000000..1e83660 --- /dev/null +++ b/lib/ansible/module_utils/common/file.py @@ -0,0 +1,205 @@ +# Copyright (c) 2018, Ansible Project +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import errno +import os +import stat +import re +import pwd +import grp +import time +import shutil +import traceback +import fcntl +import sys + +from contextlib import contextmanager +from ansible.module_utils._text import to_bytes, to_native, to_text +from ansible.module_utils.six import b, binary_type +from ansible.module_utils.common.warnings import deprecate + +try: + import selinux + HAVE_SELINUX = True +except ImportError: + HAVE_SELINUX = False + + +FILE_ATTRIBUTES = { + 'A': 'noatime', + 'a': 'append', + 'c': 'compressed', + 'C': 'nocow', + 'd': 'nodump', + 'D': 'dirsync', + 'e': 'extents', + 'E': 'encrypted', + 'h': 'blocksize', + 'i': 'immutable', + 'I': 'indexed', + 'j': 'journalled', + 'N': 'inline', + 's': 'zero', + 'S': 'synchronous', + 't': 'notail', + 'T': 'blockroot', + 'u': 'undelete', + 'X': 'compressedraw', + 'Z': 'compresseddirty', +} + + +# Used for parsing symbolic file perms +MODE_OPERATOR_RE = re.compile(r'[+=-]') +USERS_RE = re.compile(r'[^ugo]') +PERMS_RE = 
re.compile(r'[^rwxXstugo]') + + +_PERM_BITS = 0o7777 # file mode permission bits +_EXEC_PERM_BITS = 0o0111 # execute permission bits +_DEFAULT_PERM = 0o0666 # default file permission bits + + +def is_executable(path): + # This function's signature needs to be repeated + # as the first line of its docstring. + # This method is reused by the basic module, + # the repetition helps the basic module's html documentation come out right. + # http://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_docstring_signature + '''is_executable(path) + + is the given path executable? + + :arg path: The path of the file to check. + + Limitations: + + * Does not account for FSACLs. + * Most times we really want to know "Can the current user execute this + file". This function does not tell us that, only if any execute bit is set. + ''' + # These are all bitfields so first bitwise-or all the permissions we're + # looking for, then bitwise-and with the file's mode to determine if any + # execute bits are set. + return ((stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) & os.stat(path)[stat.ST_MODE]) + + +def format_attributes(attributes): + attribute_list = [FILE_ATTRIBUTES.get(attr) for attr in attributes if attr in FILE_ATTRIBUTES] + return attribute_list + + +def get_flags_from_attributes(attributes): + flags = [key for key, attr in FILE_ATTRIBUTES.items() if attr in attributes] + return ''.join(flags) + + +def get_file_arg_spec(): + arg_spec = dict( + mode=dict(type='raw'), + owner=dict(), + group=dict(), + seuser=dict(), + serole=dict(), + selevel=dict(), + setype=dict(), + attributes=dict(aliases=['attr']), + ) + return arg_spec + + +class LockTimeout(Exception): + pass + + +class FileLock: + ''' + Currently FileLock is implemented via fcntl.flock on a lock file, however this + behaviour may change in the future. 
Avoid mixing lock types fcntl.flock, + fcntl.lockf and module_utils.common.file.FileLock as it will certainly cause + unwanted and/or unexpected behaviour + ''' + def __init__(self): + deprecate("FileLock is not reliable and has never been used in core for that reason. There is no current alternative that works across POSIX targets", + version='2.16') + self.lockfd = None + + @contextmanager + def lock_file(self, path, tmpdir, lock_timeout=None): + ''' + Context for lock acquisition + ''' + try: + self.set_lock(path, tmpdir, lock_timeout) + yield + finally: + self.unlock() + + def set_lock(self, path, tmpdir, lock_timeout=None): + ''' + Create a lock file based on path with flock to prevent other processes + using given path. + Please note that currently file locking only works when it's executed by + the same user, I.E single user scenarios + + :kw path: Path (file) to lock + :kw tmpdir: Path where to place the temporary .lock file + :kw lock_timeout: + Wait n seconds for lock acquisition, fail if timeout is reached. + 0 = Do not wait, fail if lock cannot be acquired immediately, + Default is None, wait indefinitely until lock is released. 
+ :returns: True + ''' + lock_path = os.path.join(tmpdir, 'ansible-{0}.lock'.format(os.path.basename(path))) + l_wait = 0.1 + r_exception = IOError + if sys.version_info[0] == 3: + r_exception = BlockingIOError + + self.lockfd = open(lock_path, 'w') + + if lock_timeout <= 0: + fcntl.flock(self.lockfd, fcntl.LOCK_EX | fcntl.LOCK_NB) + os.chmod(lock_path, stat.S_IWRITE | stat.S_IREAD) + return True + + if lock_timeout: + e_secs = 0 + while e_secs < lock_timeout: + try: + fcntl.flock(self.lockfd, fcntl.LOCK_EX | fcntl.LOCK_NB) + os.chmod(lock_path, stat.S_IWRITE | stat.S_IREAD) + return True + except r_exception: + time.sleep(l_wait) + e_secs += l_wait + continue + + self.lockfd.close() + raise LockTimeout('{0} sec'.format(lock_timeout)) + + fcntl.flock(self.lockfd, fcntl.LOCK_EX) + os.chmod(lock_path, stat.S_IWRITE | stat.S_IREAD) + + return True + + def unlock(self): + ''' + Make sure lock file is available for everyone and Unlock the file descriptor + locked by set_lock + + :returns: True + ''' + if not self.lockfd: + return True + + try: + fcntl.flock(self.lockfd, fcntl.LOCK_UN) + self.lockfd.close() + except ValueError: # file wasn't opened, let context manager fail gracefully + pass + + return True diff --git a/lib/ansible/module_utils/common/json.py b/lib/ansible/module_utils/common/json.py new file mode 100644 index 0000000..727083c --- /dev/null +++ b/lib/ansible/module_utils/common/json.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019 Ansible Project +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import json + +import datetime + +from ansible.module_utils._text import to_text +from ansible.module_utils.common._collections_compat import Mapping +from ansible.module_utils.common.collections import is_sequence + + +def _is_unsafe(value): + return 
getattr(value, '__UNSAFE__', False) and not getattr(value, '__ENCRYPTED__', False) + + +def _is_vault(value): + return getattr(value, '__ENCRYPTED__', False) + + +def _preprocess_unsafe_encode(value): + """Recursively preprocess a data structure converting instances of ``AnsibleUnsafe`` + into their JSON dict representations + + Used in ``AnsibleJSONEncoder.iterencode`` + """ + if _is_unsafe(value): + value = {'__ansible_unsafe': to_text(value, errors='surrogate_or_strict', nonstring='strict')} + elif is_sequence(value): + value = [_preprocess_unsafe_encode(v) for v in value] + elif isinstance(value, Mapping): + value = dict((k, _preprocess_unsafe_encode(v)) for k, v in value.items()) + + return value + + +def json_dump(structure): + return json.dumps(structure, cls=AnsibleJSONEncoder, sort_keys=True, indent=4) + + +class AnsibleJSONEncoder(json.JSONEncoder): + ''' + Simple encoder class to deal with JSON encoding of Ansible internal types + ''' + + def __init__(self, preprocess_unsafe=False, vault_to_text=False, **kwargs): + self._preprocess_unsafe = preprocess_unsafe + self._vault_to_text = vault_to_text + super(AnsibleJSONEncoder, self).__init__(**kwargs) + + # NOTE: ALWAYS inform AWS/Tower when new items get added as they consume them downstream via a callback + def default(self, o): + if getattr(o, '__ENCRYPTED__', False): + # vault object + if self._vault_to_text: + value = to_text(o, errors='surrogate_or_strict') + else: + value = {'__ansible_vault': to_text(o._ciphertext, errors='surrogate_or_strict', nonstring='strict')} + elif getattr(o, '__UNSAFE__', False): + # unsafe object, this will never be triggered, see ``AnsibleJSONEncoder.iterencode`` + value = {'__ansible_unsafe': to_text(o, errors='surrogate_or_strict', nonstring='strict')} + elif isinstance(o, Mapping): + # hostvars and other objects + value = dict(o) + elif isinstance(o, (datetime.date, datetime.datetime)): + # date object + value = o.isoformat() + else: + # use default encoder + value = 
super(AnsibleJSONEncoder, self).default(o) + return value + + def iterencode(self, o, **kwargs): + """Custom iterencode, primarily design to handle encoding ``AnsibleUnsafe`` + as the ``AnsibleUnsafe`` subclasses inherit from string types and + ``json.JSONEncoder`` does not support custom encoders for string types + """ + if self._preprocess_unsafe: + o = _preprocess_unsafe_encode(o) + + return super(AnsibleJSONEncoder, self).iterencode(o, **kwargs) diff --git a/lib/ansible/module_utils/common/locale.py b/lib/ansible/module_utils/common/locale.py new file mode 100644 index 0000000..a6068c8 --- /dev/null +++ b/lib/ansible/module_utils/common/locale.py @@ -0,0 +1,61 @@ +# Copyright (c), Ansible Project +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.module_utils._text import to_native + + +def get_best_parsable_locale(module, preferences=None, raise_on_locale=False): + ''' + Attempts to return the best possible locale for parsing output in English + useful for scraping output with i18n tools. When this raises an exception + and the caller wants to continue, it should use the 'C' locale. 
+ + :param module: an AnsibleModule instance + :param preferences: A list of preferred locales, in order of preference + :param raise_on_locale: boolean that determines if we raise exception or not + due to locale CLI issues + :returns: The first matched preferred locale or 'C' which is the default + ''' + + found = 'C' # default posix, its ascii but always there + try: + locale = module.get_bin_path("locale") + if not locale: + # not using required=true as that forces fail_json + raise RuntimeWarning("Could not find 'locale' tool") + + available = [] + + if preferences is None: + # new POSIX standard or English cause those are messages core team expects + # yes, the last 2 are the same but some systems are weird + preferences = ['C.utf8', 'C.UTF-8', 'en_US.utf8', 'en_US.UTF-8', 'C', 'POSIX'] + + rc, out, err = module.run_command([locale, '-a']) + + if rc == 0: + if out: + available = out.strip().splitlines() + else: + raise RuntimeWarning("No output from locale, rc=%s: %s" % (rc, to_native(err))) + else: + raise RuntimeWarning("Unable to get locale information, rc=%s: %s" % (rc, to_native(err))) + + if available: + for pref in preferences: + if pref in available: + found = pref + break + + except RuntimeWarning as e: + if raise_on_locale: + raise + else: + module.debug('Failed to get locale information: %s' % to_native(e)) + + module.debug('Matched preferred locale to: %s' % found) + + return found diff --git a/lib/ansible/module_utils/common/network.py b/lib/ansible/module_utils/common/network.py new file mode 100644 index 0000000..c3874f8 --- /dev/null +++ b/lib/ansible/module_utils/common/network.py @@ -0,0 +1,161 @@ +# Copyright (c) 2016 Red Hat Inc +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +# General networking tools that may be used by all modules + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +from struct import pack +from socket 
# General networking helpers shared by modules: IPv4 netmask/masklen
# conversion, subnet derivation, IPv6 prefix extraction, MAC validation.

# Valid values for a single dotted-quad netmask octet: 256 - 2**i for
# i in 0..8, i.e. {255, 254, 252, 248, 240, 224, 192, 128, 0}.
VALID_MASKS = [2**8 - 2**i for i in range(0, 9)]


def is_netmask(val):
    """Return True if *val* looks like a dotted-quad IPv4 netmask.

    Each of the four octets must individually be a valid mask octet.
    NOTE(review): contiguity of the mask bits across octets is NOT
    verified (e.g. '255.0.255.0' passes); kept as-is for backward
    compatibility with existing callers.
    """
    parts = str(val).split('.')
    if not len(parts) == 4:
        return False
    for part in parts:
        try:
            if int(part) not in VALID_MASKS:
                raise ValueError
        except ValueError:
            return False
    return True


def is_masklen(val):
    """Return True if *val* is a valid CIDR prefix length (0-32)."""
    try:
        return 0 <= int(val) <= 32
    except (TypeError, ValueError):
        # TypeError covers non-numeric inputs such as None or lists,
        # which int() rejects with TypeError rather than ValueError.
        return False


def to_netmask(val):
    """Convert a masklen (prefix length) to a dotted-quad netmask.

    :raises ValueError: if *val* is not a valid masklen.
    """
    if not is_masklen(val):
        raise ValueError('invalid value for masklen')

    # Set the top `val` bits of a 32-bit word, then render it dotted-quad.
    bits = 0
    for i in range(32 - int(val), 32):
        bits |= (1 << i)

    return inet_ntoa(pack('>I', bits))


def to_masklen(val):
    """Convert a dotted-quad netmask to a masklen (prefix length).

    :raises ValueError: if *val* is not a valid netmask.
    """
    if not is_netmask(val):
        raise ValueError('invalid value for netmask: %s' % val)

    # The prefix length is the total count of set bits across all octets.
    return sum(bin(int(octet)).count('1') for octet in val.split('.'))


def to_subnet(addr, mask, dotted_notation=False):
    """Convert an addr/mask pair to its subnet.

    *mask* may be given either as a masklen or as a dotted-quad netmask.

    :returns: ``'a.b.c.d/len'`` by default, or ``'a.b.c.d e.f.g.h'``
        when *dotted_notation* is True.
    """
    try:
        if not is_masklen(mask):
            raise ValueError
        cidr = int(mask)
        mask = to_netmask(mask)
    except ValueError:
        # mask was supplied in dotted-quad form; derive the masklen from it
        cidr = to_masklen(mask)

    addr = addr.split('.')
    mask = mask.split('.')

    network = list()
    for s_addr, s_mask in zip(addr, mask):
        network.append(str(int(s_addr) & int(s_mask)))

    if dotted_notation:
        return '%s %s' % ('.'.join(network), to_netmask(cidr))
    return '%s/%s' % ('.'.join(network), cidr)


def _to_ipv6_prefix(addr, group_count):
    """Return the leading *group_count* groups of IPv6 address *addr*,
    terminated with '::'.

    Shared implementation for :func:`to_ipv6_subnet` (4 groups, 64 bits)
    and :func:`to_ipv6_network` (3 groups, 48 bits).
    """
    # Split by :: to identify omitted zero groups
    ipv6_prefix = addr.split('::')[0]

    # Collect up to group_count leading groups; mark truncation with '::'
    found_groups = []
    for group in ipv6_prefix.split(':'):
        found_groups.append(group)
        if len(found_groups) == group_count:
            break
    if len(found_groups) < group_count:
        found_groups.append('::')

    # Concatenate prefix parts, ':'-separated
    network_addr = ''
    for group in found_groups:
        if group != '::':
            network_addr += str(group)
        network_addr += str(':')

    # Ensure the prefix ends with ::
    if not network_addr.endswith('::'):
        network_addr += str(':')
    return network_addr


def to_ipv6_subnet(addr):
    """IPv6 addresses are eight groupings. The first four groupings
    (64 bits) comprise the subnet address.

    See https://tools.ietf.org/rfc/rfc2374.txt
    """
    return _to_ipv6_prefix(addr, 4)


def to_ipv6_network(addr):
    """IPv6 addresses are eight groupings. The first three groupings
    (48 bits) comprise the network address."""
    return _to_ipv6_prefix(addr, 3)


def to_bits(val):
    """Convert a dotted-quad netmask to its 32-character binary string."""
    bits = ''
    for octet in val.split('.'):
        bits += bin(int(octet))[2:].zfill(8)
    return bits


def is_mac(mac_address):
    """
    Validate MAC address for given string
    Args:
        mac_address: string to validate as MAC address

    Returns: (Boolean) True if string is valid MAC address, otherwise False
    """
    # Six hex octets separated consistently by either ':' or '-' throughout
    mac_addr_regex = re.compile('[0-9a-f]{2}([-:])[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$')
    return bool(mac_addr_regex.match(mac_address.lower()))
b/lib/ansible/module_utils/common/parameters.py new file mode 100644 index 0000000..059ca0a --- /dev/null +++ b/lib/ansible/module_utils/common/parameters.py @@ -0,0 +1,940 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019 Ansible Project +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +import datetime +import os + +from collections import deque +from itertools import chain + +from ansible.module_utils.common.collections import is_iterable +from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text +from ansible.module_utils.common.text.formatters import lenient_lowercase +from ansible.module_utils.common.warnings import warn +from ansible.module_utils.errors import ( + AliasError, + AnsibleFallbackNotFound, + AnsibleValidationErrorMultiple, + ArgumentTypeError, + ArgumentValueError, + ElementError, + MutuallyExclusiveError, + NoLogError, + RequiredByError, + RequiredError, + RequiredIfError, + RequiredOneOfError, + RequiredTogetherError, + SubParameterTypeError, +) +from ansible.module_utils.parsing.convert_bool import BOOLEANS_FALSE, BOOLEANS_TRUE + +from ansible.module_utils.common._collections_compat import ( + KeysView, + Set, + Sequence, + Mapping, + MutableMapping, + MutableSet, + MutableSequence, +) + +from ansible.module_utils.six import ( + binary_type, + integer_types, + string_types, + text_type, + PY2, + PY3, +) + +from ansible.module_utils.common.validation import ( + check_mutually_exclusive, + check_required_arguments, + check_required_together, + check_required_one_of, + check_required_if, + check_required_by, + check_type_bits, + check_type_bool, + check_type_bytes, + check_type_dict, + check_type_float, + check_type_int, + check_type_jsonarg, + check_type_list, + check_type_path, + check_type_raw, + check_type_str, +) + +# Python2 & 3 way to get NoneType +NoneType = 
type(None) + +_ADDITIONAL_CHECKS = ( + {'func': check_required_together, 'attr': 'required_together', 'err': RequiredTogetherError}, + {'func': check_required_one_of, 'attr': 'required_one_of', 'err': RequiredOneOfError}, + {'func': check_required_if, 'attr': 'required_if', 'err': RequiredIfError}, + {'func': check_required_by, 'attr': 'required_by', 'err': RequiredByError}, +) + +# if adding boolean attribute, also add to PASS_BOOL +# some of this dupes defaults from controller config +PASS_VARS = { + 'check_mode': ('check_mode', False), + 'debug': ('_debug', False), + 'diff': ('_diff', False), + 'keep_remote_files': ('_keep_remote_files', False), + 'module_name': ('_name', None), + 'no_log': ('no_log', False), + 'remote_tmp': ('_remote_tmp', None), + 'selinux_special_fs': ('_selinux_special_fs', ['fuse', 'nfs', 'vboxsf', 'ramfs', '9p', 'vfat']), + 'shell_executable': ('_shell', '/bin/sh'), + 'socket': ('_socket_path', None), + 'string_conversion_action': ('_string_conversion_action', 'warn'), + 'syslog_facility': ('_syslog_facility', 'INFO'), + 'tmpdir': ('_tmpdir', None), + 'verbosity': ('_verbosity', 0), + 'version': ('ansible_version', '0.0'), +} + +PASS_BOOLS = ('check_mode', 'debug', 'diff', 'keep_remote_files', 'no_log') + +DEFAULT_TYPE_VALIDATORS = { + 'str': check_type_str, + 'list': check_type_list, + 'dict': check_type_dict, + 'bool': check_type_bool, + 'int': check_type_int, + 'float': check_type_float, + 'path': check_type_path, + 'raw': check_type_raw, + 'jsonarg': check_type_jsonarg, + 'json': check_type_jsonarg, + 'bytes': check_type_bytes, + 'bits': check_type_bits, +} + + +def _get_type_validator(wanted): + """Returns the callable used to validate a wanted type and the type name. + + :arg wanted: String or callable. If a string, get the corresponding + validation function from DEFAULT_TYPE_VALIDATORS. If callable, + get the name of the custom callable and return that for the type_checker. 
+ + :returns: Tuple of callable function or None, and a string that is the name + of the wanted type. + """ + + # Use one of our builtin validators. + if not callable(wanted): + if wanted is None: + # Default type for parameters + wanted = 'str' + + type_checker = DEFAULT_TYPE_VALIDATORS.get(wanted) + + # Use the custom callable for validation. + else: + type_checker = wanted + wanted = getattr(wanted, '__name__', to_native(type(wanted))) + + return type_checker, wanted + + +def _get_legal_inputs(argument_spec, parameters, aliases=None): + if aliases is None: + aliases = _handle_aliases(argument_spec, parameters) + + return list(aliases.keys()) + list(argument_spec.keys()) + + +def _get_unsupported_parameters(argument_spec, parameters, legal_inputs=None, options_context=None, store_supported=None): + """Check keys in parameters against those provided in legal_inputs + to ensure they contain legal values. If legal_inputs are not supplied, + they will be generated using the argument_spec. + + :arg argument_spec: Dictionary of parameters, their type, and valid values. + :arg parameters: Dictionary of parameters. + :arg legal_inputs: List of valid key names property names. Overrides values + in argument_spec. + :arg options_context: List of parent keys for tracking the context of where + a parameter is defined. + + :returns: Set of unsupported parameters. Empty set if no unsupported parameters + are found. 
+ """ + + if legal_inputs is None: + legal_inputs = _get_legal_inputs(argument_spec, parameters) + + unsupported_parameters = set() + for k in parameters.keys(): + if k not in legal_inputs: + context = k + if options_context: + context = tuple(options_context + [k]) + + unsupported_parameters.add(context) + + if store_supported is not None: + supported_aliases = _handle_aliases(argument_spec, parameters) + supported_params = [] + for option in legal_inputs: + if option in supported_aliases: + continue + supported_params.append(option) + + store_supported.update({context: (supported_params, supported_aliases)}) + + return unsupported_parameters + + +def _handle_aliases(argument_spec, parameters, alias_warnings=None, alias_deprecations=None): + """Process aliases from an argument_spec including warnings and deprecations. + + Modify ``parameters`` by adding a new key for each alias with the supplied + value from ``parameters``. + + If a list is provided to the alias_warnings parameter, it will be filled with tuples + (option, alias) in every case where both an option and its alias are specified. + + If a list is provided to alias_deprecations, it will be populated with dictionaries, + each containing deprecation information for each alias found in argument_spec. + + :param argument_spec: Dictionary of parameters, their type, and valid values. + :type argument_spec: dict + + :param parameters: Dictionary of parameters. 
+ :type parameters: dict + + :param alias_warnings: + :type alias_warnings: list + + :param alias_deprecations: + :type alias_deprecations: list + """ + + aliases_results = {} # alias:canon + + for (k, v) in argument_spec.items(): + aliases = v.get('aliases', None) + default = v.get('default', None) + required = v.get('required', False) + + if alias_deprecations is not None: + for alias in argument_spec[k].get('deprecated_aliases', []): + if alias.get('name') in parameters: + alias_deprecations.append(alias) + + if default is not None and required: + # not alias specific but this is a good place to check this + raise ValueError("internal error: required and default are mutually exclusive for %s" % k) + + if aliases is None: + continue + + if not is_iterable(aliases) or isinstance(aliases, (binary_type, text_type)): + raise TypeError('internal error: aliases must be a list or tuple') + + for alias in aliases: + aliases_results[alias] = k + if alias in parameters: + if k in parameters and alias_warnings is not None: + alias_warnings.append((k, alias)) + parameters[k] = parameters[alias] + + return aliases_results + + +def _list_deprecations(argument_spec, parameters, prefix=''): + """Return a list of deprecations + + :arg argument_spec: An argument spec dictionary + :arg parameters: Dictionary of parameters + + :returns: List of dictionaries containing a message and version in which + the deprecated parameter will be removed, or an empty list. + + :Example return: + + .. code-block:: python + + [ + { + 'msg': "Param 'deptest' is deprecated. See the module docs for more information", + 'version': '2.9' + } + ] + """ + + deprecations = [] + for arg_name, arg_opts in argument_spec.items(): + if arg_name in parameters: + if prefix: + sub_prefix = '%s["%s"]' % (prefix, arg_name) + else: + sub_prefix = arg_name + if arg_opts.get('removed_at_date') is not None: + deprecations.append({ + 'msg': "Param '%s' is deprecated. 
See the module docs for more information" % sub_prefix, + 'date': arg_opts.get('removed_at_date'), + 'collection_name': arg_opts.get('removed_from_collection'), + }) + elif arg_opts.get('removed_in_version') is not None: + deprecations.append({ + 'msg': "Param '%s' is deprecated. See the module docs for more information" % sub_prefix, + 'version': arg_opts.get('removed_in_version'), + 'collection_name': arg_opts.get('removed_from_collection'), + }) + # Check sub-argument spec + sub_argument_spec = arg_opts.get('options') + if sub_argument_spec is not None: + sub_arguments = parameters[arg_name] + if isinstance(sub_arguments, Mapping): + sub_arguments = [sub_arguments] + if isinstance(sub_arguments, list): + for sub_params in sub_arguments: + if isinstance(sub_params, Mapping): + deprecations.extend(_list_deprecations(sub_argument_spec, sub_params, prefix=sub_prefix)) + + return deprecations + + +def _list_no_log_values(argument_spec, params): + """Return set of no log values + + :arg argument_spec: An argument spec dictionary + :arg params: Dictionary of all parameters + + :returns: :class:`set` of strings that should be hidden from output: + """ + + no_log_values = set() + for arg_name, arg_opts in argument_spec.items(): + if arg_opts.get('no_log', False): + # Find the value for the no_log'd param + no_log_object = params.get(arg_name, None) + + if no_log_object: + try: + no_log_values.update(_return_datastructure_name(no_log_object)) + except TypeError as e: + raise TypeError('Failed to convert "%s": %s' % (arg_name, to_native(e))) + + # Get no_log values from suboptions + sub_argument_spec = arg_opts.get('options') + if sub_argument_spec is not None: + wanted_type = arg_opts.get('type') + sub_parameters = params.get(arg_name) + + if sub_parameters is not None: + if wanted_type == 'dict' or (wanted_type == 'list' and arg_opts.get('elements', '') == 'dict'): + # Sub parameters can be a dict or list of dicts. Ensure parameters are always a list. 
+ if not isinstance(sub_parameters, list): + sub_parameters = [sub_parameters] + + for sub_param in sub_parameters: + # Validate dict fields in case they came in as strings + + if isinstance(sub_param, string_types): + sub_param = check_type_dict(sub_param) + + if not isinstance(sub_param, Mapping): + raise TypeError("Value '{1}' in the sub parameter field '{0}' must by a {2}, " + "not '{1.__class__.__name__}'".format(arg_name, sub_param, wanted_type)) + + no_log_values.update(_list_no_log_values(sub_argument_spec, sub_param)) + + return no_log_values + + +def _return_datastructure_name(obj): + """ Return native stringified values from datastructures. + + For use with removing sensitive values pre-jsonification.""" + if isinstance(obj, (text_type, binary_type)): + if obj: + yield to_native(obj, errors='surrogate_or_strict') + return + elif isinstance(obj, Mapping): + for element in obj.items(): + for subelement in _return_datastructure_name(element[1]): + yield subelement + elif is_iterable(obj): + for element in obj: + for subelement in _return_datastructure_name(element): + yield subelement + elif obj is None or isinstance(obj, bool): + # This must come before int because bools are also ints + return + elif isinstance(obj, tuple(list(integer_types) + [float])): + yield to_native(obj, nonstring='simplerepr') + else: + raise TypeError('Unknown parameter type: %s' % (type(obj))) + + +def _remove_values_conditions(value, no_log_strings, deferred_removals): + """ + Helper function for :meth:`remove_values`. + + :arg value: The value to check for strings that need to be stripped + :arg no_log_strings: set of strings which must be stripped out of any values + :arg deferred_removals: List which holds information about nested + containers that have to be iterated for removals. It is passed into + this function so that more entries can be added to it if value is + a container type. 
The format of each entry is a 2-tuple where the first + element is the ``value`` parameter and the second value is a new + container to copy the elements of ``value`` into once iterated. + + :returns: if ``value`` is a scalar, returns ``value`` with two exceptions: + + 1. :class:`~datetime.datetime` objects which are changed into a string representation. + 2. objects which are in ``no_log_strings`` are replaced with a placeholder + so that no sensitive data is leaked. + + If ``value`` is a container type, returns a new empty container. + + ``deferred_removals`` is added to as a side-effect of this function. + + .. warning:: It is up to the caller to make sure the order in which value + is passed in is correct. For instance, higher level containers need + to be passed in before lower level containers. For example, given + ``{'level1': {'level2': 'level3': [True]} }`` first pass in the + dictionary for ``level1``, then the dict for ``level2``, and finally + the list for ``level3``. + """ + if isinstance(value, (text_type, binary_type)): + # Need native str type + native_str_value = value + if isinstance(value, text_type): + value_is_text = True + if PY2: + native_str_value = to_bytes(value, errors='surrogate_or_strict') + elif isinstance(value, binary_type): + value_is_text = False + if PY3: + native_str_value = to_text(value, errors='surrogate_or_strict') + + if native_str_value in no_log_strings: + return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER' + for omit_me in no_log_strings: + native_str_value = native_str_value.replace(omit_me, '*' * 8) + + if value_is_text and isinstance(native_str_value, binary_type): + value = to_text(native_str_value, encoding='utf-8', errors='surrogate_then_replace') + elif not value_is_text and isinstance(native_str_value, text_type): + value = to_bytes(native_str_value, encoding='utf-8', errors='surrogate_then_replace') + else: + value = native_str_value + + elif isinstance(value, Sequence): + if isinstance(value, MutableSequence): + 
new_value = type(value)() + else: + new_value = [] # Need a mutable value + deferred_removals.append((value, new_value)) + value = new_value + + elif isinstance(value, Set): + if isinstance(value, MutableSet): + new_value = type(value)() + else: + new_value = set() # Need a mutable value + deferred_removals.append((value, new_value)) + value = new_value + + elif isinstance(value, Mapping): + if isinstance(value, MutableMapping): + new_value = type(value)() + else: + new_value = {} # Need a mutable value + deferred_removals.append((value, new_value)) + value = new_value + + elif isinstance(value, tuple(chain(integer_types, (float, bool, NoneType)))): + stringy_value = to_native(value, encoding='utf-8', errors='surrogate_or_strict') + if stringy_value in no_log_strings: + return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER' + for omit_me in no_log_strings: + if omit_me in stringy_value: + return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER' + + elif isinstance(value, (datetime.datetime, datetime.date)): + value = value.isoformat() + else: + raise TypeError('Value of unknown type: %s, %s' % (type(value), value)) + + return value + + +def _set_defaults(argument_spec, parameters, set_default=True): + """Set default values for parameters when no value is supplied. + + Modifies parameters directly. + + :arg argument_spec: Argument spec + :type argument_spec: dict + + :arg parameters: Parameters to evaluate + :type parameters: dict + + :kwarg set_default: Whether or not to set the default values + :type set_default: bool + + :returns: Set of strings that should not be logged. + :rtype: set + """ + + no_log_values = set() + for param, value in argument_spec.items(): + + # TODO: Change the default value from None to Sentinel to differentiate between + # user supplied None and a default value set by this function. + default = value.get('default', None) + + # This prevents setting defaults on required items on the 1st run, + # otherwise will set things without a default to None on the 2nd. 
+ if param not in parameters and (default is not None or set_default): + # Make sure any default value for no_log fields are masked. + if value.get('no_log', False) and default: + no_log_values.add(default) + + parameters[param] = default + + return no_log_values + + +def _sanitize_keys_conditions(value, no_log_strings, ignore_keys, deferred_removals): + """ Helper method to :func:`sanitize_keys` to build ``deferred_removals`` and avoid deep recursion. """ + if isinstance(value, (text_type, binary_type)): + return value + + if isinstance(value, Sequence): + if isinstance(value, MutableSequence): + new_value = type(value)() + else: + new_value = [] # Need a mutable value + deferred_removals.append((value, new_value)) + return new_value + + if isinstance(value, Set): + if isinstance(value, MutableSet): + new_value = type(value)() + else: + new_value = set() # Need a mutable value + deferred_removals.append((value, new_value)) + return new_value + + if isinstance(value, Mapping): + if isinstance(value, MutableMapping): + new_value = type(value)() + else: + new_value = {} # Need a mutable value + deferred_removals.append((value, new_value)) + return new_value + + if isinstance(value, tuple(chain(integer_types, (float, bool, NoneType)))): + return value + + if isinstance(value, (datetime.datetime, datetime.date)): + return value + + raise TypeError('Value of unknown type: %s, %s' % (type(value), value)) + + +def _validate_elements(wanted_type, parameter, values, options_context=None, errors=None): + + if errors is None: + errors = AnsibleValidationErrorMultiple() + + type_checker, wanted_element_type = _get_type_validator(wanted_type) + validated_parameters = [] + # Get param name for strings so we can later display this value in a useful error message if needed + # Only pass 'kwargs' to our checkers and ignore custom callable checkers + kwargs = {} + if wanted_element_type == 'str' and isinstance(wanted_type, string_types): + if isinstance(parameter, string_types): + 
kwargs['param'] = parameter + elif isinstance(parameter, dict): + kwargs['param'] = list(parameter.keys())[0] + + for value in values: + try: + validated_parameters.append(type_checker(value, **kwargs)) + except (TypeError, ValueError) as e: + msg = "Elements value for option '%s'" % parameter + if options_context: + msg += " found in '%s'" % " -> ".join(options_context) + msg += " is of type %s and we were unable to convert to %s: %s" % (type(value), wanted_element_type, to_native(e)) + errors.append(ElementError(msg)) + return validated_parameters + + +def _validate_argument_types(argument_spec, parameters, prefix='', options_context=None, errors=None): + """Validate that parameter types match the type in the argument spec. + + Determine the appropriate type checker function and run each + parameter value through that function. All error messages from type checker + functions are returned. If any parameter fails to validate, it will not + be in the returned parameters. + + :arg argument_spec: Argument spec + :type argument_spec: dict + + :arg parameters: Parameters + :type parameters: dict + + :kwarg prefix: Name of the parent key that contains the spec. Used in the error message + :type prefix: str + + :kwarg options_context: List of contexts? + :type options_context: list + + :returns: Two item tuple containing validated and coerced parameters + and a list of any errors that were encountered. 
+ :rtype: tuple + + """ + + if errors is None: + errors = AnsibleValidationErrorMultiple() + + for param, spec in argument_spec.items(): + if param not in parameters: + continue + + value = parameters[param] + if value is None: + continue + + wanted_type = spec.get('type') + type_checker, wanted_name = _get_type_validator(wanted_type) + # Get param name for strings so we can later display this value in a useful error message if needed + # Only pass 'kwargs' to our checkers and ignore custom callable checkers + kwargs = {} + if wanted_name == 'str' and isinstance(wanted_type, string_types): + kwargs['param'] = list(parameters.keys())[0] + + # Get the name of the parent key if this is a nested option + if prefix: + kwargs['prefix'] = prefix + + try: + parameters[param] = type_checker(value, **kwargs) + elements_wanted_type = spec.get('elements', None) + if elements_wanted_type: + elements = parameters[param] + if wanted_type != 'list' or not isinstance(elements, list): + msg = "Invalid type %s for option '%s'" % (wanted_name, elements) + if options_context: + msg += " found in '%s'." % " -> ".join(options_context) + msg += ", elements value check is supported only with 'list' type" + errors.append(ArgumentTypeError(msg)) + parameters[param] = _validate_elements(elements_wanted_type, param, elements, options_context, errors) + + except (TypeError, ValueError) as e: + msg = "argument '%s' is of type %s" % (param, type(value)) + if options_context: + msg += " found in '%s'." 
% " -> ".join(options_context) + msg += " and we were unable to convert to %s: %s" % (wanted_name, to_native(e)) + errors.append(ArgumentTypeError(msg)) + + +def _validate_argument_values(argument_spec, parameters, options_context=None, errors=None): + """Ensure all arguments have the requested values, and there are no stray arguments""" + + if errors is None: + errors = AnsibleValidationErrorMultiple() + + for param, spec in argument_spec.items(): + choices = spec.get('choices') + if choices is None: + continue + + if isinstance(choices, (frozenset, KeysView, Sequence)) and not isinstance(choices, (binary_type, text_type)): + if param in parameters: + # Allow one or more when type='list' param with choices + if isinstance(parameters[param], list): + diff_list = [item for item in parameters[param] if item not in choices] + if diff_list: + choices_str = ", ".join([to_native(c) for c in choices]) + diff_str = ", ".join(diff_list) + msg = "value of %s must be one or more of: %s. Got no match for: %s" % (param, choices_str, diff_str) + if options_context: + msg = "{0} found in {1}".format(msg, " -> ".join(options_context)) + errors.append(ArgumentValueError(msg)) + elif parameters[param] not in choices: + # PyYaml converts certain strings to bools. If we can unambiguously convert back, do so before checking + # the value. If we can't figure this out, module author is responsible. 
+ if parameters[param] == 'False': + overlap = BOOLEANS_FALSE.intersection(choices) + if len(overlap) == 1: + # Extract from a set + (parameters[param],) = overlap + + if parameters[param] == 'True': + overlap = BOOLEANS_TRUE.intersection(choices) + if len(overlap) == 1: + (parameters[param],) = overlap + + if parameters[param] not in choices: + choices_str = ", ".join([to_native(c) for c in choices]) + msg = "value of %s must be one of: %s, got: %s" % (param, choices_str, parameters[param]) + if options_context: + msg = "{0} found in {1}".format(msg, " -> ".join(options_context)) + errors.append(ArgumentValueError(msg)) + else: + msg = "internal error: choices for argument %s are not iterable: %s" % (param, choices) + if options_context: + msg = "{0} found in {1}".format(msg, " -> ".join(options_context)) + errors.append(ArgumentTypeError(msg)) + + +def _validate_sub_spec( + argument_spec, + parameters, + prefix="", + options_context=None, + errors=None, + no_log_values=None, + unsupported_parameters=None, + supported_parameters=None, + alias_deprecations=None, +): + """Validate sub argument spec. + + This function is recursive. 
+ """ + + if options_context is None: + options_context = [] + + if errors is None: + errors = AnsibleValidationErrorMultiple() + + if no_log_values is None: + no_log_values = set() + + if unsupported_parameters is None: + unsupported_parameters = set() + if supported_parameters is None: + supported_parameters = dict() + + for param, value in argument_spec.items(): + wanted = value.get('type') + if wanted == 'dict' or (wanted == 'list' and value.get('elements', '') == 'dict'): + sub_spec = value.get('options') + if value.get('apply_defaults', False): + if sub_spec is not None: + if parameters.get(param) is None: + parameters[param] = {} + else: + continue + elif sub_spec is None or param not in parameters or parameters[param] is None: + continue + + # Keep track of context for warning messages + options_context.append(param) + + # Make sure we can iterate over the elements + if not isinstance(parameters[param], Sequence) or isinstance(parameters[param], string_types): + elements = [parameters[param]] + else: + elements = parameters[param] + + for idx, sub_parameters in enumerate(elements): + no_log_values.update(set_fallbacks(sub_spec, sub_parameters)) + + if not isinstance(sub_parameters, dict): + errors.append(SubParameterTypeError("value of '%s' must be of type dict or list of dicts" % param)) + continue + + # Set prefix for warning messages + new_prefix = prefix + param + if wanted == 'list': + new_prefix += '[%d]' % idx + new_prefix += '.' + + alias_warnings = [] + alias_deprecations_sub = [] + try: + options_aliases = _handle_aliases(sub_spec, sub_parameters, alias_warnings, alias_deprecations_sub) + except (TypeError, ValueError) as e: + options_aliases = {} + errors.append(AliasError(to_native(e))) + + for option, alias in alias_warnings: + warn('Both option %s%s and its alias %s%s are set.' 
% (new_prefix, option, new_prefix, alias)) + + if alias_deprecations is not None: + for deprecation in alias_deprecations_sub: + alias_deprecations.append({ + 'name': '%s%s' % (new_prefix, deprecation['name']), + 'version': deprecation.get('version'), + 'date': deprecation.get('date'), + 'collection_name': deprecation.get('collection_name'), + }) + + try: + no_log_values.update(_list_no_log_values(sub_spec, sub_parameters)) + except TypeError as te: + errors.append(NoLogError(to_native(te))) + + legal_inputs = _get_legal_inputs(sub_spec, sub_parameters, options_aliases) + unsupported_parameters.update( + _get_unsupported_parameters( + sub_spec, + sub_parameters, + legal_inputs, + options_context, + store_supported=supported_parameters, + ) + ) + + try: + check_mutually_exclusive(value.get('mutually_exclusive'), sub_parameters, options_context) + except TypeError as e: + errors.append(MutuallyExclusiveError(to_native(e))) + + no_log_values.update(_set_defaults(sub_spec, sub_parameters, False)) + + try: + check_required_arguments(sub_spec, sub_parameters, options_context) + except TypeError as e: + errors.append(RequiredError(to_native(e))) + + _validate_argument_types(sub_spec, sub_parameters, new_prefix, options_context, errors=errors) + _validate_argument_values(sub_spec, sub_parameters, options_context, errors=errors) + + for check in _ADDITIONAL_CHECKS: + try: + check['func'](value.get(check['attr']), sub_parameters, options_context) + except TypeError as e: + errors.append(check['err'](to_native(e))) + + no_log_values.update(_set_defaults(sub_spec, sub_parameters)) + + # Handle nested specs + _validate_sub_spec( + sub_spec, sub_parameters, new_prefix, options_context, errors, no_log_values, + unsupported_parameters, supported_parameters, alias_deprecations) + + options_context.pop() + + +def env_fallback(*args, **kwargs): + """Load value from environment variable""" + + for arg in args: + if arg in os.environ: + return os.environ[arg] + raise 
AnsibleFallbackNotFound + + +def set_fallbacks(argument_spec, parameters): + no_log_values = set() + for param, value in argument_spec.items(): + fallback = value.get('fallback', (None,)) + fallback_strategy = fallback[0] + fallback_args = [] + fallback_kwargs = {} + if param not in parameters and fallback_strategy is not None: + for item in fallback[1:]: + if isinstance(item, dict): + fallback_kwargs = item + else: + fallback_args = item + try: + fallback_value = fallback_strategy(*fallback_args, **fallback_kwargs) + except AnsibleFallbackNotFound: + continue + else: + if value.get('no_log', False) and fallback_value: + no_log_values.add(fallback_value) + parameters[param] = fallback_value + + return no_log_values + + +def sanitize_keys(obj, no_log_strings, ignore_keys=frozenset()): + """Sanitize the keys in a container object by removing ``no_log`` values from key names. + + This is a companion function to the :func:`remove_values` function. Similar to that function, + we make use of ``deferred_removals`` to avoid hitting maximum recursion depth in cases of + large data structures. + + :arg obj: The container object to sanitize. Non-container objects are returned unmodified. + :arg no_log_strings: A set of string values we do not want logged. + :kwarg ignore_keys: A set of string values of keys to not sanitize. + + :returns: An object with sanitized keys. + """ + + deferred_removals = deque() + + no_log_strings = [to_native(s, errors='surrogate_or_strict') for s in no_log_strings] + new_value = _sanitize_keys_conditions(obj, no_log_strings, ignore_keys, deferred_removals) + + while deferred_removals: + old_data, new_data = deferred_removals.popleft() + + if isinstance(new_data, Mapping): + for old_key, old_elem in old_data.items(): + if old_key in ignore_keys or old_key.startswith('_ansible'): + new_data[old_key] = _sanitize_keys_conditions(old_elem, no_log_strings, ignore_keys, deferred_removals) + else: + # Sanitize the old key. 
We take advantage of the sanitizing code in + # _remove_values_conditions() rather than recreating it here. + new_key = _remove_values_conditions(old_key, no_log_strings, None) + new_data[new_key] = _sanitize_keys_conditions(old_elem, no_log_strings, ignore_keys, deferred_removals) + else: + for elem in old_data: + new_elem = _sanitize_keys_conditions(elem, no_log_strings, ignore_keys, deferred_removals) + if isinstance(new_data, MutableSequence): + new_data.append(new_elem) + elif isinstance(new_data, MutableSet): + new_data.add(new_elem) + else: + raise TypeError('Unknown container type encountered when removing private values from keys') + + return new_value + + +def remove_values(value, no_log_strings): + """Remove strings in ``no_log_strings`` from value. + + If value is a container type, then remove a lot more. + + Use of ``deferred_removals`` exists, rather than a pure recursive solution, + because of the potential to hit the maximum recursion depth when dealing with + large amounts of data (see `issue #24560 <https://github.com/ansible/ansible/issues/24560>`_). 
+ """ + + deferred_removals = deque() + + no_log_strings = [to_native(s, errors='surrogate_or_strict') for s in no_log_strings] + new_value = _remove_values_conditions(value, no_log_strings, deferred_removals) + + while deferred_removals: + old_data, new_data = deferred_removals.popleft() + if isinstance(new_data, Mapping): + for old_key, old_elem in old_data.items(): + new_elem = _remove_values_conditions(old_elem, no_log_strings, deferred_removals) + new_data[old_key] = new_elem + else: + for elem in old_data: + new_elem = _remove_values_conditions(elem, no_log_strings, deferred_removals) + if isinstance(new_data, MutableSequence): + new_data.append(new_elem) + elif isinstance(new_data, MutableSet): + new_data.add(new_elem) + else: + raise TypeError('Unknown container type encountered when removing private values from output') + + return new_value diff --git a/lib/ansible/module_utils/common/process.py b/lib/ansible/module_utils/common/process.py new file mode 100644 index 0000000..97761a4 --- /dev/null +++ b/lib/ansible/module_utils/common/process.py @@ -0,0 +1,46 @@ +# Copyright (c) 2018, Ansible Project +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os + +from ansible.module_utils.common.file import is_executable + + +def get_bin_path(arg, opt_dirs=None, required=None): + ''' + Find system executable in PATH. Raises ValueError if executable is not found. + Optional arguments: + - required: [Deprecated] Prior to 2.10, if executable is not found and required is true it raises an Exception. + In 2.10 and later, an Exception is always raised. This parameter will be removed in 2.14. + - opt_dirs: optional list of directories to search in addition to PATH + In addition to PATH and opt_dirs, this function also looks through /sbin, /usr/sbin and /usr/local/sbin. 
A lot of + modules, especially for gathering facts, depend on this behaviour. + If found return full path, otherwise raise ValueError. + ''' + opt_dirs = [] if opt_dirs is None else opt_dirs + + sbin_paths = ['/sbin', '/usr/sbin', '/usr/local/sbin'] + paths = [] + for d in opt_dirs: + if d is not None and os.path.exists(d): + paths.append(d) + paths += os.environ.get('PATH', '').split(os.pathsep) + bin_path = None + # mangle PATH to include /sbin dirs + for p in sbin_paths: + if p not in paths and os.path.exists(p): + paths.append(p) + for d in paths: + if not d: + continue + path = os.path.join(d, arg) + if os.path.exists(path) and not os.path.isdir(path) and is_executable(path): + bin_path = path + break + if bin_path is None: + raise ValueError('Failed to find required executable "%s" in paths: %s' % (arg, os.pathsep.join(paths))) + + return bin_path diff --git a/lib/ansible/module_utils/common/respawn.py b/lib/ansible/module_utils/common/respawn.py new file mode 100644 index 0000000..3bc526a --- /dev/null +++ b/lib/ansible/module_utils/common/respawn.py @@ -0,0 +1,98 @@ +# Copyright: (c) 2021, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import subprocess +import sys + +from ansible.module_utils.common.text.converters import to_bytes, to_native + + +def has_respawned(): + return hasattr(sys.modules['__main__'], '_respawned') + + +def respawn_module(interpreter_path): + """ + Respawn the currently-running Ansible Python module under the specified Python interpreter. 
+ + Ansible modules that require libraries that are typically available only under well-known interpreters + (eg, ``yum``, ``apt``, ``dnf``) can use bespoke logic to determine the libraries they need are not + available, then call `respawn_module` to re-execute the current module under a different interpreter + and exit the current process when the new subprocess has completed. The respawned process inherits only + stdout/stderr from the current process. + + Only a single respawn is allowed. ``respawn_module`` will fail on nested respawns. Modules are encouraged + to call `has_respawned()` to defensively guide behavior before calling ``respawn_module``, and to ensure + that the target interpreter exists, as ``respawn_module`` will not fail gracefully. + + :arg interpreter_path: path to a Python interpreter to respawn the current module + """ + + if has_respawned(): + raise Exception('module has already been respawned') + + # FUTURE: we need a safe way to log that a respawn has occurred for forensic/debug purposes + payload = _create_payload() + stdin_read, stdin_write = os.pipe() + os.write(stdin_write, to_bytes(payload)) + os.close(stdin_write) + rc = subprocess.call([interpreter_path, '--'], stdin=stdin_read) + sys.exit(rc) # pylint: disable=ansible-bad-function + + +def probe_interpreters_for_module(interpreter_paths, module_name): + """ + Probes a supplied list of Python interpreters, returning the first one capable of + importing the named module. This is useful when attempting to locate a "system + Python" where OS-packaged utility modules are located. + + :arg interpreter_paths: iterable of paths to Python interpreters. The paths will be probed + in order, and the first path that exists and can successfully import the named module will + be returned (or ``None`` if probing fails for all supplied paths). 
+ :arg module_name: fully-qualified Python module name to probe for (eg, ``selinux``) + """ + for interpreter_path in interpreter_paths: + if not os.path.exists(interpreter_path): + continue + try: + rc = subprocess.call([interpreter_path, '-c', 'import {0}'.format(module_name)]) + if rc == 0: + return interpreter_path + except Exception: + continue + + return None + + +def _create_payload(): + from ansible.module_utils import basic + smuggled_args = getattr(basic, '_ANSIBLE_ARGS') + if not smuggled_args: + raise Exception('unable to access ansible.module_utils.basic._ANSIBLE_ARGS (not launched by AnsiballZ?)') + module_fqn = sys.modules['__main__']._module_fqn + modlib_path = sys.modules['__main__']._modlib_path + respawn_code_template = ''' +import runpy +import sys + +module_fqn = '{module_fqn}' +modlib_path = '{modlib_path}' +smuggled_args = b"""{smuggled_args}""".strip() + + +if __name__ == '__main__': + sys.path.insert(0, modlib_path) + + from ansible.module_utils import basic + basic._ANSIBLE_ARGS = smuggled_args + + runpy.run_module(module_fqn, init_globals=dict(_respawned=True), run_name='__main__', alter_sys=True) + ''' + + respawn_code = respawn_code_template.format(module_fqn=module_fqn, modlib_path=modlib_path, smuggled_args=to_native(smuggled_args)) + + return respawn_code diff --git a/lib/ansible/module_utils/common/sys_info.py b/lib/ansible/module_utils/common/sys_info.py new file mode 100644 index 0000000..206b36c --- /dev/null +++ b/lib/ansible/module_utils/common/sys_info.py @@ -0,0 +1,157 @@ +# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013 +# Copyright (c), Toshio Kuratomi <tkuratomi@ansible.com> 2016 +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +import platform + +from ansible.module_utils import distro +from ansible.module_utils.common._utils import get_all_subclasses + 
+
+__all__ = ('get_distribution', 'get_distribution_version', 'get_platform_subclass')
+
+
+def get_distribution():
+    '''
+    Return the name of the distribution the module is running on.
+
+    :rtype: NativeString or None
+    :returns: Name of the distribution the module is running on
+
+    This function attempts to determine what distribution the code is running
+    on and return a string representing that value. If the platform is Linux
+    and the distribution cannot be determined, it returns ``OtherLinux``.
+    '''
+    distribution = distro.id().capitalize()
+
+    if platform.system() == 'Linux':
+        if distribution == 'Amzn':
+            distribution = 'Amazon'
+        elif distribution == 'Rhel':
+            distribution = 'Redhat'
+        elif not distribution:
+            distribution = 'OtherLinux'
+
+    return distribution
+
+
+def get_distribution_version():
+    '''
+    Get the version of the distribution the code is running on
+
+    :rtype: NativeString or None
+    :returns: A string representation of the version of the distribution. If it
+        cannot determine the version, it returns an empty string. If this is not run on
+        a Linux machine it returns None.
+    '''
+    version = None
+
+    needs_best_version = frozenset((
+        u'centos',
+        u'debian',
+    ))
+
+    version = distro.version()
+    distro_id = distro.id()
+
+    if version is not None:
+        if distro_id in needs_best_version:
+            version_best = distro.version(best=True)
+
+            # CentOS maintainers believe only the major version is appropriate
+            # but Ansible users desire minor version information, e.g., 7.5.
+            # https://github.com/ansible/ansible/issues/50141#issuecomment-449452781
+            if distro_id == u'centos':
+                version = u'.'.join(version_best.split(u'.')[:2])
+
+            # Debian does not include minor version in /etc/os-release.
+ # Bug report filed upstream requesting this be added to /etc/os-release + # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=931197 + if distro_id == u'debian': + version = version_best + + else: + version = u'' + + return version + + +def get_distribution_codename(): + ''' + Return the code name for this Linux Distribution + + :rtype: NativeString or None + :returns: A string representation of the distribution's codename or None if not a Linux distro + ''' + codename = None + if platform.system() == 'Linux': + # Until this gets merged and we update our bundled copy of distro: + # https://github.com/nir0s/distro/pull/230 + # Fixes Fedora 28+ not having a code name and Ubuntu Xenial Xerus needing to be "xenial" + os_release_info = distro.os_release_info() + codename = os_release_info.get('version_codename') + + if codename is None: + codename = os_release_info.get('ubuntu_codename') + + if codename is None and distro.id() == 'ubuntu': + lsb_release_info = distro.lsb_release_info() + codename = lsb_release_info.get('codename') + + if codename is None: + codename = distro.codename() + if codename == u'': + codename = None + + return codename + + +def get_platform_subclass(cls): + ''' + Finds a subclass implementing desired functionality on the platform the code is running on + + :arg cls: Class to find an appropriate subclass for + :returns: A class that implements the functionality on this platform + + Some Ansible modules have different implementations depending on the platform they run on. This + function is used to select between the various implementations and choose one. You can look at + the implementation of the Ansible :ref:`User module<user_module>` module for an example of how to use this. + + This function replaces ``basic.load_platform_subclass()``. When you port code, you need to + change the callers to be explicit about instantiating the class. For instance, code in the + Ansible User module changed from:: + + .. 
code-block:: python + + # Old + class User: + def __new__(cls, args, kwargs): + return load_platform_subclass(User, args, kwargs) + + # New + class User: + def __new__(cls, *args, **kwargs): + new_cls = get_platform_subclass(User) + return super(cls, new_cls).__new__(new_cls) + ''' + this_platform = platform.system() + distribution = get_distribution() + + subclass = None + + # get the most specific superclass for this platform + if distribution is not None: + for sc in get_all_subclasses(cls): + if sc.distribution is not None and sc.distribution == distribution and sc.platform == this_platform: + subclass = sc + if subclass is None: + for sc in get_all_subclasses(cls): + if sc.platform == this_platform and sc.distribution is None: + subclass = sc + if subclass is None: + subclass = cls + + return subclass diff --git a/lib/ansible/module_utils/common/text/__init__.py b/lib/ansible/module_utils/common/text/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/lib/ansible/module_utils/common/text/__init__.py diff --git a/lib/ansible/module_utils/common/text/converters.py b/lib/ansible/module_utils/common/text/converters.py new file mode 100644 index 0000000..5b25df4 --- /dev/null +++ b/lib/ansible/module_utils/common/text/converters.py @@ -0,0 +1,322 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019 Ansible Project +# (c) 2016 Toshio Kuratomi <tkuratomi@ansible.com> +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +import codecs +import datetime +import json + +from ansible.module_utils.common._collections_compat import Set +from ansible.module_utils.six import ( + PY3, + binary_type, + iteritems, + text_type, +) + +try: + codecs.lookup_error('surrogateescape') + HAS_SURROGATEESCAPE = True +except LookupError: + HAS_SURROGATEESCAPE = False + + +_COMPOSED_ERROR_HANDLERS = frozenset((None, 
'surrogate_or_replace', + 'surrogate_or_strict', + 'surrogate_then_replace')) + + +def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): + """Make sure that a string is a byte string + + :arg obj: An object to make sure is a byte string. In most cases this + will be either a text string or a byte string. However, with + ``nonstring='simplerepr'``, this can be used as a traceback-free + version of ``str(obj)``. + :kwarg encoding: The encoding to use to transform from a text string to + a byte string. Defaults to using 'utf-8'. + :kwarg errors: The error handler to use if the text string is not + encodable using the specified encoding. Any valid `codecs error + handler <https://docs.python.org/3/library/codecs.html#codec-base-classes>`_ + may be specified. There are three additional error strategies + specifically aimed at helping people to port code. The first two are: + + :surrogate_or_strict: Will use ``surrogateescape`` if it is a valid + handler, otherwise it will use ``strict`` + :surrogate_or_replace: Will use ``surrogateescape`` if it is a valid + handler, otherwise it will use ``replace``. + + Because ``surrogateescape`` was added in Python3 this usually means that + Python3 will use ``surrogateescape`` and Python2 will use the fallback + error handler. Note that the code checks for ``surrogateescape`` when the + module is imported. If you have a backport of ``surrogateescape`` for + Python2, be sure to register the error handler prior to importing this + module. + + The last error handler is: + + :surrogate_then_replace: Will use ``surrogateescape`` if it is a valid + handler. If encoding with ``surrogateescape`` would traceback, + surrogates are first replaced with a replacement characters + and then the string is encoded using ``replace`` (which replaces + the rest of the nonencodable bytes). If ``surrogateescape`` is + not present it will simply use ``replace``. 
(Added in Ansible 2.3) + This strategy is designed to never traceback when it attempts + to encode a string. + + The default until Ansible-2.2 was ``surrogate_or_replace`` + From Ansible-2.3 onwards, the default is ``surrogate_then_replace``. + + :kwarg nonstring: The strategy to use if a nonstring is specified in + ``obj``. Default is 'simplerepr'. Valid values are: + + :simplerepr: The default. This takes the ``str`` of the object and + then returns the bytes version of that string. + :empty: Return an empty byte string + :passthru: Return the object passed in + :strict: Raise a :exc:`TypeError` + + :returns: Typically this returns a byte string. If a nonstring object is + passed in this may be a different type depending on the strategy + specified by nonstring. This will never return a text string. + + .. note:: If passed a byte string, this function does not check that the + string is valid in the specified encoding. If it's important that the + byte string is in the specified encoding do:: + + encoded_string = to_bytes(to_text(input_string, 'latin-1'), 'utf-8') + + .. version_changed:: 2.3 + + Added the ``surrogate_then_replace`` error handler and made it the default error handler. 
+ """ + if isinstance(obj, binary_type): + return obj + + # We're given a text string + # If it has surrogates, we know because it will decode + original_errors = errors + if errors in _COMPOSED_ERROR_HANDLERS: + if HAS_SURROGATEESCAPE: + errors = 'surrogateescape' + elif errors == 'surrogate_or_strict': + errors = 'strict' + else: + errors = 'replace' + + if isinstance(obj, text_type): + try: + # Try this first as it's the fastest + return obj.encode(encoding, errors) + except UnicodeEncodeError: + if original_errors in (None, 'surrogate_then_replace'): + # We should only reach this if encoding was non-utf8 original_errors was + # surrogate_then_escape and errors was surrogateescape + + # Slow but works + return_string = obj.encode('utf-8', 'surrogateescape') + return_string = return_string.decode('utf-8', 'replace') + return return_string.encode(encoding, 'replace') + raise + + # Note: We do these last even though we have to call to_bytes again on the + # value because we're optimizing the common case + if nonstring == 'simplerepr': + try: + value = str(obj) + except UnicodeError: + try: + value = repr(obj) + except UnicodeError: + # Giving up + return to_bytes('') + elif nonstring == 'passthru': + return obj + elif nonstring == 'empty': + # python2.4 doesn't have b'' + return to_bytes('') + elif nonstring == 'strict': + raise TypeError('obj must be a string type') + else: + raise TypeError('Invalid value %s for to_bytes\' nonstring parameter' % nonstring) + + return to_bytes(value, encoding, errors) + + +def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): + """Make sure that a string is a text string + + :arg obj: An object to make sure is a text string. In most cases this + will be either a text string or a byte string. However, with + ``nonstring='simplerepr'``, this can be used as a traceback-free + version of ``str(obj)``. + :kwarg encoding: The encoding to use to transform from a byte string to + a text string. 
Defaults to using 'utf-8'. + :kwarg errors: The error handler to use if the byte string is not + decodable using the specified encoding. Any valid `codecs error + handler <https://docs.python.org/3/library/codecs.html#codec-base-classes>`_ + may be specified. We support three additional error strategies + specifically aimed at helping people to port code: + + :surrogate_or_strict: Will use surrogateescape if it is a valid + handler, otherwise it will use strict + :surrogate_or_replace: Will use surrogateescape if it is a valid + handler, otherwise it will use replace. + :surrogate_then_replace: Does the same as surrogate_or_replace but + `was added for symmetry with the error handlers in + :func:`ansible.module_utils._text.to_bytes` (Added in Ansible 2.3) + + Because surrogateescape was added in Python3 this usually means that + Python3 will use `surrogateescape` and Python2 will use the fallback + error handler. Note that the code checks for surrogateescape when the + module is imported. If you have a backport of `surrogateescape` for + python2, be sure to register the error handler prior to importing this + module. + + The default until Ansible-2.2 was `surrogate_or_replace` + In Ansible-2.3 this defaults to `surrogate_then_replace` for symmetry + with :func:`ansible.module_utils._text.to_bytes` . + :kwarg nonstring: The strategy to use if a nonstring is specified in + ``obj``. Default is 'simplerepr'. Valid values are: + + :simplerepr: The default. This takes the ``str`` of the object and + then returns the text version of that string. + :empty: Return an empty text string + :passthru: Return the object passed in + :strict: Raise a :exc:`TypeError` + + :returns: Typically this returns a text string. If a nonstring object is + passed in this may be a different type depending on the strategy + specified by nonstring. This will never return a byte string. + From Ansible-2.3 onwards, the default is `surrogate_then_replace`. + + .. 
version_changed:: 2.3 + + Added the surrogate_then_replace error handler and made it the default error handler. + """ + if isinstance(obj, text_type): + return obj + + if errors in _COMPOSED_ERROR_HANDLERS: + if HAS_SURROGATEESCAPE: + errors = 'surrogateescape' + elif errors == 'surrogate_or_strict': + errors = 'strict' + else: + errors = 'replace' + + if isinstance(obj, binary_type): + # Note: We don't need special handling for surrogate_then_replace + # because all bytes will either be made into surrogates or are valid + # to decode. + return obj.decode(encoding, errors) + + # Note: We do these last even though we have to call to_text again on the + # value because we're optimizing the common case + if nonstring == 'simplerepr': + try: + value = str(obj) + except UnicodeError: + try: + value = repr(obj) + except UnicodeError: + # Giving up + return u'' + elif nonstring == 'passthru': + return obj + elif nonstring == 'empty': + return u'' + elif nonstring == 'strict': + raise TypeError('obj must be a string type') + else: + raise TypeError('Invalid value %s for to_text\'s nonstring parameter' % nonstring) + + return to_text(value, encoding, errors) + + +#: :py:func:`to_native` +#: Transform a variable into the native str type for the python version +#: +#: On Python2, this is an alias for +#: :func:`~ansible.module_utils.to_bytes`. On Python3 it is an alias for +#: :func:`~ansible.module_utils.to_text`. It makes it easier to +#: transform a variable into the native str type for the python version +#: the code is running on. Use this when constructing the message to +#: send to exceptions or when dealing with an API that needs to take +#: a native string. 
Example::
+#:
+#:     try:
+#:         1//0
+#:     except ZeroDivisionError as e:
+#:         raise MyException('Encountered an error: %s' % to_native(e))
+if PY3:
+    to_native = to_text
+else:
+    to_native = to_bytes
+
+
+def _json_encode_fallback(obj):
+    if isinstance(obj, Set):
+        return list(obj)
+    elif isinstance(obj, datetime.datetime):
+        return obj.isoformat()
+    raise TypeError("Cannot json serialize %s" % to_native(obj))
+
+
+def jsonify(data, **kwargs):
+    for encoding in ("utf-8", "latin-1"):
+        try:
+            return json.dumps(data, encoding=encoding, default=_json_encode_fallback, **kwargs)
+        # Old systems using old simplejson modules do not support the encoding keyword.
+        except TypeError:
+            try:
+                new_data = container_to_text(data, encoding=encoding)
+            except UnicodeDecodeError:
+                continue
+            return json.dumps(new_data, default=_json_encode_fallback, **kwargs)
+        except UnicodeDecodeError:
+            continue
+    raise UnicodeError('Invalid unicode encoding encountered')
+
+
+def container_to_bytes(d, encoding='utf-8', errors='surrogate_or_strict'):
+    ''' Recursively convert dict keys and values to byte str
+
+        Specialized for json return because this only handles, lists, tuples,
+        and dict container types (the containers that the json module returns)
+    '''
+
+    if isinstance(d, text_type):
+        return to_bytes(d, encoding=encoding, errors=errors)
+    elif isinstance(d, dict):
+        return dict(container_to_bytes(o, encoding, errors) for o in iteritems(d))
+    elif isinstance(d, list):
+        return [container_to_bytes(o, encoding, errors) for o in d]
+    elif isinstance(d, tuple):
+        return tuple(container_to_bytes(o, encoding, errors) for o in d)
+    else:
+        return d
+
+
+def container_to_text(d, encoding='utf-8', errors='surrogate_or_strict'):
+    """Recursively convert dict keys and values to text str
+
+    Specialized for json return because this only handles, lists, tuples,
+    and dict container types (the containers that the json module returns)
+    """
+
+    if isinstance(d, binary_type):
+        # Warning, can traceback
+        return 
to_text(d, encoding=encoding, errors=errors) + elif isinstance(d, dict): + return dict(container_to_text(o, encoding, errors) for o in iteritems(d)) + elif isinstance(d, list): + return [container_to_text(o, encoding, errors) for o in d] + elif isinstance(d, tuple): + return tuple(container_to_text(o, encoding, errors) for o in d) + else: + return d diff --git a/lib/ansible/module_utils/common/text/formatters.py b/lib/ansible/module_utils/common/text/formatters.py new file mode 100644 index 0000000..94ca5a3 --- /dev/null +++ b/lib/ansible/module_utils/common/text/formatters.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019 Ansible Project +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +import re + +from ansible.module_utils.six import iteritems + +SIZE_RANGES = { + 'Y': 1 << 80, + 'Z': 1 << 70, + 'E': 1 << 60, + 'P': 1 << 50, + 'T': 1 << 40, + 'G': 1 << 30, + 'M': 1 << 20, + 'K': 1 << 10, + 'B': 1, +} + + +def lenient_lowercase(lst): + """Lowercase elements of a list. + + If an element is not a string, pass it through untouched. + """ + lowered = [] + for value in lst: + try: + lowered.append(value.lower()) + except AttributeError: + lowered.append(value) + return lowered + + +def human_to_bytes(number, default_unit=None, isbits=False): + """Convert number in string format into bytes (ex: '2K' => 2048) or using unit argument. + + example: human_to_bytes('10M') <=> human_to_bytes(10, 'M'). + + When isbits is False (default), converts bytes from a human-readable format to integer. + example: human_to_bytes('1MB') returns 1048576 (int). + The function expects 'B' (uppercase) as a byte identifier passed + as a part of 'name' param string or 'unit', e.g. 'MB'/'KB'/etc. + (except when the identifier is single 'b', it is perceived as a byte identifier too). + if 'Mb'/'Kb'/... 
is passed, the ValueError will be raised.
+
+    When isbits is True, converts bits from a human-readable format to integer.
+    example: human_to_bytes('1Mb', isbits=True) returns 8388608 (int) -
+    string bits representation was passed and return as a number or bits.
+    The function expects 'b' (lowercase) as a bit identifier, e.g. 'Mb'/'Kb'/etc.
+    if 'MB'/'KB'/... is passed, the ValueError will be raised.
+    """
+    m = re.search(r'^\s*(\d*\.?\d*)\s*([A-Za-z]+)?', str(number), flags=re.IGNORECASE)
+    if m is None:
+        raise ValueError("human_to_bytes() can't interpret following string: %s" % str(number))
+    try:
+        num = float(m.group(1))
+    except Exception:
+        raise ValueError("human_to_bytes() can't interpret following number: %s (original input string: %s)" % (m.group(1), number))
+
+    unit = m.group(2)
+    if unit is None:
+        unit = default_unit
+
+    if unit is None:
+        ''' No unit given, returning raw number '''
+        return int(round(num))
+    range_key = unit[0].upper()
+    try:
+        limit = SIZE_RANGES[range_key]
+    except Exception:
+        raise ValueError("human_to_bytes() failed to convert %s (unit = %s). The suffix must be one of %s" % (number, unit, ", ".join(SIZE_RANGES.keys())))
+
+    # default value
+    unit_class = 'B'
+    unit_class_name = 'byte'
+    # handling bits case
+    if isbits:
+        unit_class = 'b'
+        unit_class_name = 'bit'
+    # check unit value if more than one character (KB, MB)
+    if len(unit) > 1:
+        expect_message = 'expect %s%s or %s' % (range_key, unit_class, range_key)
+        if range_key == 'B':
+            expect_message = 'expect %s or %s' % (unit_class, unit_class_name)
+
+        if unit_class_name in unit.lower():
+            pass
+        elif unit[1] != unit_class:
+            raise ValueError("human_to_bytes() failed to convert %s. 
Value is not a valid string (%s)" % (number, expect_message)) + + return int(round(num * limit)) + + +def bytes_to_human(size, isbits=False, unit=None): + base = 'Bytes' + if isbits: + base = 'bits' + suffix = '' + + for suffix, limit in sorted(iteritems(SIZE_RANGES), key=lambda item: -item[1]): + if (unit is None and size >= limit) or unit is not None and unit.upper() == suffix[0]: + break + + if limit != 1: + suffix += base[0] + else: + suffix = base + + return '%.2f %s' % (size / limit, suffix) diff --git a/lib/ansible/module_utils/common/validation.py b/lib/ansible/module_utils/common/validation.py new file mode 100644 index 0000000..5a4cebb --- /dev/null +++ b/lib/ansible/module_utils/common/validation.py @@ -0,0 +1,578 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019 Ansible Project +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +import os +import re + +from ast import literal_eval +from ansible.module_utils._text import to_native +from ansible.module_utils.common._json_compat import json +from ansible.module_utils.common.collections import is_iterable +from ansible.module_utils.common.text.converters import jsonify +from ansible.module_utils.common.text.formatters import human_to_bytes +from ansible.module_utils.parsing.convert_bool import boolean +from ansible.module_utils.six import ( + binary_type, + integer_types, + string_types, + text_type, +) + + +def count_terms(terms, parameters): + """Count the number of occurrences of a key in a given dictionary + + :arg terms: String or iterable of values to check + :arg parameters: Dictionary of parameters + + :returns: An integer that is the number of occurrences of the terms values + in the provided dictionary. 
+ """ + + if not is_iterable(terms): + terms = [terms] + + return len(set(terms).intersection(parameters)) + + +def safe_eval(value, locals=None, include_exceptions=False): + # do not allow method calls to modules + if not isinstance(value, string_types): + # already templated to a datavaluestructure, perhaps? + if include_exceptions: + return (value, None) + return value + if re.search(r'\w\.\w+\(', value): + if include_exceptions: + return (value, None) + return value + # do not allow imports + if re.search(r'import \w+', value): + if include_exceptions: + return (value, None) + return value + try: + result = literal_eval(value) + if include_exceptions: + return (result, None) + else: + return result + except Exception as e: + if include_exceptions: + return (value, e) + return value + + +def check_mutually_exclusive(terms, parameters, options_context=None): + """Check mutually exclusive terms against argument parameters + + Accepts a single list or list of lists that are groups of terms that should be + mutually exclusive with one another + + :arg terms: List of mutually exclusive parameters + :arg parameters: Dictionary of parameters + :kwarg options_context: List of strings of parent key names if ``terms`` are + in a sub spec. + + :returns: Empty list or raises :class:`TypeError` if the check fails. 
+ """ + + results = [] + if terms is None: + return results + + for check in terms: + count = count_terms(check, parameters) + if count > 1: + results.append(check) + + if results: + full_list = ['|'.join(check) for check in results] + msg = "parameters are mutually exclusive: %s" % ', '.join(full_list) + if options_context: + msg = "{0} found in {1}".format(msg, " -> ".join(options_context)) + raise TypeError(to_native(msg)) + + return results + + +def check_required_one_of(terms, parameters, options_context=None): + """Check each list of terms to ensure at least one exists in the given module + parameters + + Accepts a list of lists or tuples + + :arg terms: List of lists of terms to check. For each list of terms, at + least one is required. + :arg parameters: Dictionary of parameters + :kwarg options_context: List of strings of parent key names if ``terms`` are + in a sub spec. + + :returns: Empty list or raises :class:`TypeError` if the check fails. + """ + + results = [] + if terms is None: + return results + + for term in terms: + count = count_terms(term, parameters) + if count == 0: + results.append(term) + + if results: + for term in results: + msg = "one of the following is required: %s" % ', '.join(term) + if options_context: + msg = "{0} found in {1}".format(msg, " -> ".join(options_context)) + raise TypeError(to_native(msg)) + + return results + + +def check_required_together(terms, parameters, options_context=None): + """Check each list of terms to ensure every parameter in each list exists + in the given parameters. + + Accepts a list of lists or tuples. + + :arg terms: List of lists of terms to check. Each list should include + parameters that are all required when at least one is specified + in the parameters. + :arg parameters: Dictionary of parameters + :kwarg options_context: List of strings of parent key names if ``terms`` are + in a sub spec. + + :returns: Empty list or raises :class:`TypeError` if the check fails. 
+ """ + + results = [] + if terms is None: + return results + + for term in terms: + counts = [count_terms(field, parameters) for field in term] + non_zero = [c for c in counts if c > 0] + if len(non_zero) > 0: + if 0 in counts: + results.append(term) + if results: + for term in results: + msg = "parameters are required together: %s" % ', '.join(term) + if options_context: + msg = "{0} found in {1}".format(msg, " -> ".join(options_context)) + raise TypeError(to_native(msg)) + + return results + + +def check_required_by(requirements, parameters, options_context=None): + """For each key in requirements, check the corresponding list to see if they + exist in parameters. + + Accepts a single string or list of values for each key. + + :arg requirements: Dictionary of requirements + :arg parameters: Dictionary of parameters + :kwarg options_context: List of strings of parent key names if ``requirements`` are + in a sub spec. + + :returns: Empty dictionary or raises :class:`TypeError` if the + """ + + result = {} + if requirements is None: + return result + + for (key, value) in requirements.items(): + if key not in parameters or parameters[key] is None: + continue + result[key] = [] + # Support strings (single-item lists) + if isinstance(value, string_types): + value = [value] + for required in value: + if required not in parameters or parameters[required] is None: + result[key].append(required) + + if result: + for key, missing in result.items(): + if len(missing) > 0: + msg = "missing parameter(s) required by '%s': %s" % (key, ', '.join(missing)) + if options_context: + msg = "{0} found in {1}".format(msg, " -> ".join(options_context)) + raise TypeError(to_native(msg)) + + return result + + +def check_required_arguments(argument_spec, parameters, options_context=None): + """Check all parameters in argument_spec and return a list of parameters + that are required but not present in parameters. 
+ + Raises :class:`TypeError` if the check fails + + :arg argument_spec: Argument spec dictionary containing all parameters + and their specification + :arg parameters: Dictionary of parameters + :kwarg options_context: List of strings of parent key names if ``argument_spec`` are + in a sub spec. + + :returns: Empty list or raises :class:`TypeError` if the check fails. + """ + + missing = [] + if argument_spec is None: + return missing + + for (k, v) in argument_spec.items(): + required = v.get('required', False) + if required and k not in parameters: + missing.append(k) + + if missing: + msg = "missing required arguments: %s" % ", ".join(sorted(missing)) + if options_context: + msg = "{0} found in {1}".format(msg, " -> ".join(options_context)) + raise TypeError(to_native(msg)) + + return missing + + +def check_required_if(requirements, parameters, options_context=None): + """Check parameters that are conditionally required + + Raises :class:`TypeError` if the check fails + + :arg requirements: List of lists specifying a parameter, value, parameters + required when the given parameter is the specified value, and optionally + a boolean indicating any or all parameters are required. + + :Example: + + .. code-block:: python + + required_if=[ + ['state', 'present', ('path',), True], + ['someint', 99, ('bool_param', 'string_param')], + ] + + :arg parameters: Dictionary of parameters + + :returns: Empty list or raises :class:`TypeError` if the check fails. + The results attribute of the exception contains a list of dictionaries. + Each dictionary is the result of evaluating each item in requirements. + Each return dictionary contains the following keys: + + :key missing: List of parameters that are required but missing + :key requires: 'any' or 'all' + :key parameter: Parameter name that has the requirement + :key value: Original value of the parameter + :key requirements: Original required parameters + + :Example: + + .. 
code-block:: python + + [ + { + 'parameter': 'someint', + 'value': 99 + 'requirements': ('bool_param', 'string_param'), + 'missing': ['string_param'], + 'requires': 'all', + } + ] + + :kwarg options_context: List of strings of parent key names if ``requirements`` are + in a sub spec. + """ + results = [] + if requirements is None: + return results + + for req in requirements: + missing = {} + missing['missing'] = [] + max_missing_count = 0 + is_one_of = False + if len(req) == 4: + key, val, requirements, is_one_of = req + else: + key, val, requirements = req + + # is_one_of is True at least one requirement should be + # present, else all requirements should be present. + if is_one_of: + max_missing_count = len(requirements) + missing['requires'] = 'any' + else: + missing['requires'] = 'all' + + if key in parameters and parameters[key] == val: + for check in requirements: + count = count_terms(check, parameters) + if count == 0: + missing['missing'].append(check) + if len(missing['missing']) and len(missing['missing']) >= max_missing_count: + missing['parameter'] = key + missing['value'] = val + missing['requirements'] = requirements + results.append(missing) + + if results: + for missing in results: + msg = "%s is %s but %s of the following are missing: %s" % ( + missing['parameter'], missing['value'], missing['requires'], ', '.join(missing['missing'])) + if options_context: + msg = "{0} found in {1}".format(msg, " -> ".join(options_context)) + raise TypeError(to_native(msg)) + + return results + + +def check_missing_parameters(parameters, required_parameters=None): + """This is for checking for required params when we can not check via + argspec because we need more information than is simply given in the argspec. + + Raises :class:`TypeError` if any required parameters are missing + + :arg parameters: Dictionary of parameters + :arg required_parameters: List of parameters to look for in the given parameters. 
+ + :returns: Empty list or raises :class:`TypeError` if the check fails. + """ + missing_params = [] + if required_parameters is None: + return missing_params + + for param in required_parameters: + if not parameters.get(param): + missing_params.append(param) + + if missing_params: + msg = "missing required arguments: %s" % ', '.join(missing_params) + raise TypeError(to_native(msg)) + + return missing_params + + +# FIXME: The param and prefix parameters here are coming from AnsibleModule._check_type_string() +# which is using those for the warning messaged based on string conversion warning settings. +# Not sure how to deal with that here since we don't have config state to query. +def check_type_str(value, allow_conversion=True, param=None, prefix=''): + """Verify that the value is a string or convert to a string. + + Since unexpected changes can sometimes happen when converting to a string, + ``allow_conversion`` controls whether or not the value will be converted or a + TypeError will be raised if the value is not a string and would be converted + + :arg value: Value to validate or convert to a string + :arg allow_conversion: Whether to convert the string and return it or raise + a TypeError + + :returns: Original value if it is a string, the value converted to a string + if allow_conversion=True, or raises a TypeError if allow_conversion=False. + """ + if isinstance(value, string_types): + return value + + if allow_conversion: + return to_native(value, errors='surrogate_or_strict') + + msg = "'{0!r}' is not a string and conversion is not allowed".format(value) + raise TypeError(to_native(msg)) + + +def check_type_list(value): + """Verify that the value is a list or convert to a list + + A comma separated string will be split into a list. Raises a :class:`TypeError` + if unable to convert to a list. 
+ + :arg value: Value to validate or convert to a list + + :returns: Original value if it is already a list, single item list if a + float, int, or string without commas, or a multi-item list if a + comma-delimited string. + """ + if isinstance(value, list): + return value + + if isinstance(value, string_types): + return value.split(",") + elif isinstance(value, int) or isinstance(value, float): + return [str(value)] + + raise TypeError('%s cannot be converted to a list' % type(value)) + + +def check_type_dict(value): + """Verify that value is a dict or convert it to a dict and return it. + + Raises :class:`TypeError` if unable to convert to a dict + + :arg value: Dict or string to convert to a dict. Accepts ``k1=v2, k2=v2``. + + :returns: value converted to a dictionary + """ + if isinstance(value, dict): + return value + + if isinstance(value, string_types): + if value.startswith("{"): + try: + return json.loads(value) + except Exception: + (result, exc) = safe_eval(value, dict(), include_exceptions=True) + if exc is not None: + raise TypeError('unable to evaluate string as dictionary') + return result + elif '=' in value: + fields = [] + field_buffer = [] + in_quote = False + in_escape = False + for c in value.strip(): + if in_escape: + field_buffer.append(c) + in_escape = False + elif c == '\\': + in_escape = True + elif not in_quote and c in ('\'', '"'): + in_quote = c + elif in_quote and in_quote == c: + in_quote = False + elif not in_quote and c in (',', ' '): + field = ''.join(field_buffer) + if field: + fields.append(field) + field_buffer = [] + else: + field_buffer.append(c) + + field = ''.join(field_buffer) + if field: + fields.append(field) + return dict(x.split("=", 1) for x in fields) + else: + raise TypeError("dictionary requested, could not parse JSON or key=value") + + raise TypeError('%s cannot be converted to a dict' % type(value)) + + +def check_type_bool(value): + """Verify that the value is a bool or convert it to a bool and return it. 
+ + Raises :class:`TypeError` if unable to convert to a bool + + :arg value: String, int, or float to convert to bool. Valid booleans include: + '1', 'on', 1, '0', 0, 'n', 'f', 'false', 'true', 'y', 't', 'yes', 'no', 'off' + + :returns: Boolean True or False + """ + if isinstance(value, bool): + return value + + if isinstance(value, string_types) or isinstance(value, (int, float)): + return boolean(value) + + raise TypeError('%s cannot be converted to a bool' % type(value)) + + +def check_type_int(value): + """Verify that the value is an integer and return it or convert the value + to an integer and return it + + Raises :class:`TypeError` if unable to convert to an int + + :arg value: String or int to convert of verify + + :return: int of given value + """ + if isinstance(value, integer_types): + return value + + if isinstance(value, string_types): + try: + return int(value) + except ValueError: + pass + + raise TypeError('%s cannot be converted to an int' % type(value)) + + +def check_type_float(value): + """Verify that value is a float or convert it to a float and return it + + Raises :class:`TypeError` if unable to convert to a float + + :arg value: float, int, str, or bytes to verify or convert and return. + + :returns: float of given value. 
+ """ + if isinstance(value, float): + return value + + if isinstance(value, (binary_type, text_type, int)): + try: + return float(value) + except ValueError: + pass + + raise TypeError('%s cannot be converted to a float' % type(value)) + + +def check_type_path(value,): + """Verify the provided value is a string or convert it to a string, + then return the expanded path + """ + value = check_type_str(value) + return os.path.expanduser(os.path.expandvars(value)) + + +def check_type_raw(value): + """Returns the raw value""" + return value + + +def check_type_bytes(value): + """Convert a human-readable string value to bytes + + Raises :class:`TypeError` if unable to covert the value + """ + try: + return human_to_bytes(value) + except ValueError: + raise TypeError('%s cannot be converted to a Byte value' % type(value)) + + +def check_type_bits(value): + """Convert a human-readable string bits value to bits in integer. + + Example: ``check_type_bits('1Mb')`` returns integer 1048576. + + Raises :class:`TypeError` if unable to covert the value. + """ + try: + return human_to_bytes(value, isbits=True) + except ValueError: + raise TypeError('%s cannot be converted to a Bit value' % type(value)) + + +def check_type_jsonarg(value): + """Return a jsonified string. 
Sometimes the controller turns a json string + into a dict/list so transform it back into json here + + Raises :class:`TypeError` if unable to covert the value + + """ + if isinstance(value, (text_type, binary_type)): + return value.strip() + elif isinstance(value, (list, tuple, dict)): + return jsonify(value) + raise TypeError('%s cannot be converted to a json string' % type(value)) diff --git a/lib/ansible/module_utils/common/warnings.py b/lib/ansible/module_utils/common/warnings.py new file mode 100644 index 0000000..9423e6a --- /dev/null +++ b/lib/ansible/module_utils/common/warnings.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019 Ansible Project +# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.module_utils.six import string_types + +_global_warnings = [] +_global_deprecations = [] + + +def warn(warning): + if isinstance(warning, string_types): + _global_warnings.append(warning) + else: + raise TypeError("warn requires a string not a %s" % type(warning)) + + +def deprecate(msg, version=None, date=None, collection_name=None): + if isinstance(msg, string_types): + # For compatibility, we accept that neither version nor date is set, + # and treat that the same as if version would haven been set + if date is not None: + _global_deprecations.append({'msg': msg, 'date': date, 'collection_name': collection_name}) + else: + _global_deprecations.append({'msg': msg, 'version': version, 'collection_name': collection_name}) + else: + raise TypeError("deprecate requires a string not a %s" % type(msg)) + + +def get_warning_messages(): + """Return a tuple of warning messages accumulated over this run""" + return tuple(_global_warnings) + + +def get_deprecation_messages(): + """Return a tuple of deprecations accumulated over this run""" + return tuple(_global_deprecations) diff --git 
a/lib/ansible/module_utils/common/yaml.py b/lib/ansible/module_utils/common/yaml.py
new file mode 100644
index 0000000..e79cc09
--- /dev/null
+++ b/lib/ansible/module_utils/common/yaml.py
@@ -0,0 +1,48 @@
# (c) 2020 Matt Martz <matt@sivel.net>
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)

"""
This file provides ease of use shortcuts for loading and dumping YAML,
preferring the YAML compiled C extensions to reduce duplicated code.
"""

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from functools import partial as _partial

# True only when the libyaml C extension classes were importable.
HAS_LIBYAML = False

try:
    import yaml as _yaml
except ImportError:
    # PyYAML not installed at all; importers feature-test via HAS_YAML.
    HAS_YAML = False
else:
    HAS_YAML = True

if HAS_YAML:
    try:
        # Prefer the C-accelerated classes. AttributeError is caught too
        # because some PyYAML builds expose the module without the C names.
        from yaml import CSafeLoader as SafeLoader
        from yaml import CSafeDumper as SafeDumper
        from yaml.cyaml import CParser as Parser

        HAS_LIBYAML = True
    except (ImportError, AttributeError):
        # Pure-python fallbacks with the same interface.
        from yaml import SafeLoader  # type: ignore[misc]
        from yaml import SafeDumper  # type: ignore[misc]
        from yaml.parser import Parser  # type: ignore[misc]

    # Pre-bound helpers so callers never pass Loader/Dumper explicitly.
    yaml_load = _partial(_yaml.load, Loader=SafeLoader)
    yaml_load_all = _partial(_yaml.load_all, Loader=SafeLoader)

    yaml_dump = _partial(_yaml.dump, Dumper=SafeDumper)
    yaml_dump_all = _partial(_yaml.dump_all, Dumper=SafeDumper)
else:
    # Inert placeholders so the names always exist; check HAS_YAML before use.
    SafeLoader = object  # type: ignore[assignment,misc]
    SafeDumper = object  # type: ignore[assignment,misc]
    Parser = object  # type: ignore[assignment,misc]

    yaml_load = None  # type: ignore[assignment]
    yaml_load_all = None  # type: ignore[assignment]
    yaml_dump = None  # type: ignore[assignment]
    yaml_dump_all = None  # type: ignore[assignment]