diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-28 16:04:21 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-28 16:04:21 +0000 |
commit | 8a754e0858d922e955e71b253c139e071ecec432 (patch) | |
tree | 527d16e74bfd1840c85efd675fdecad056c54107 /lib/ansible/playbook | |
parent | Initial commit. (diff) | |
download | ansible-core-upstream/2.14.3.tar.xz ansible-core-upstream/2.14.3.zip |
Adding upstream version 2.14.3. (tag: upstream/2.14.3, branch: upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'lib/ansible/playbook')
23 files changed, 5622 insertions, 0 deletions
diff --git a/lib/ansible/playbook/__init__.py b/lib/ansible/playbook/__init__.py new file mode 100644 index 0000000..0ab2271 --- /dev/null +++ b/lib/ansible/playbook/__init__.py @@ -0,0 +1,117 @@ +# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os + +from ansible import constants as C +from ansible.errors import AnsibleParserError +from ansible.module_utils._text import to_text, to_native +from ansible.playbook.play import Play +from ansible.playbook.playbook_include import PlaybookInclude +from ansible.plugins.loader import add_all_plugin_dirs +from ansible.utils.display import Display +from ansible.utils.path import unfrackpath + +display = Display() + + +__all__ = ['Playbook'] + + +class Playbook: + + def __init__(self, loader): + # Entries in the datastructure of a playbook may + # be either a play or an include statement + self._entries = [] + self._basedir = to_text(os.getcwd(), errors='surrogate_or_strict') + self._loader = loader + self._file_name = None + + @staticmethod + def load(file_name, variable_manager=None, loader=None): + pb = Playbook(loader=loader) + pb._load_playbook_data(file_name=file_name, variable_manager=variable_manager) + return pb + + def 
_load_playbook_data(self, file_name, variable_manager, vars=None): + + if os.path.isabs(file_name): + self._basedir = os.path.dirname(file_name) + else: + self._basedir = os.path.normpath(os.path.join(self._basedir, os.path.dirname(file_name))) + + # set the loaders basedir + cur_basedir = self._loader.get_basedir() + self._loader.set_basedir(self._basedir) + + add_all_plugin_dirs(self._basedir) + + self._file_name = file_name + + try: + ds = self._loader.load_from_file(os.path.basename(file_name)) + except UnicodeDecodeError as e: + raise AnsibleParserError("Could not read playbook (%s) due to encoding issues: %s" % (file_name, to_native(e))) + + # check for errors and restore the basedir in case this error is caught and handled + if ds is None: + self._loader.set_basedir(cur_basedir) + raise AnsibleParserError("Empty playbook, nothing to do: %s" % unfrackpath(file_name), obj=ds) + elif not isinstance(ds, list): + self._loader.set_basedir(cur_basedir) + raise AnsibleParserError("A playbook must be a list of plays, got a %s instead: %s" % (type(ds), unfrackpath(file_name)), obj=ds) + elif not ds: + self._loader.set_basedir(cur_basedir) + raise AnsibleParserError("A playbook must contain at least one play: %s" % unfrackpath(file_name)) + + # Parse the playbook entries. 
For plays, we simply parse them + # using the Play() object, and includes are parsed using the + # PlaybookInclude() object + for entry in ds: + if not isinstance(entry, dict): + # restore the basedir in case this error is caught and handled + self._loader.set_basedir(cur_basedir) + raise AnsibleParserError("playbook entries must be either valid plays or 'import_playbook' statements", obj=entry) + + if any(action in entry for action in C._ACTION_IMPORT_PLAYBOOK): + pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader) + if pb is not None: + self._entries.extend(pb._entries) + else: + which = entry + for k in C._ACTION_IMPORT_PLAYBOOK: + if k in entry: + which = entry[k] + break + display.display("skipping playbook '%s' due to conditional test failure" % which, color=C.COLOR_SKIP) + else: + entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader, vars=vars) + self._entries.append(entry_obj) + + # we're done, so restore the old basedir in the loader + self._loader.set_basedir(cur_basedir) + + def get_loader(self): + return self._loader + + def get_plays(self): + return self._entries[:] diff --git a/lib/ansible/playbook/attribute.py b/lib/ansible/playbook/attribute.py new file mode 100644 index 0000000..b28405d --- /dev/null +++ b/lib/ansible/playbook/attribute.py @@ -0,0 +1,202 @@ +# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from copy import copy, deepcopy + +from ansible.utils.sentinel import Sentinel + +_CONTAINERS = frozenset(('list', 'dict', 'set')) + + +class Attribute: + + def __init__( + self, + isa=None, + private=False, + default=None, + required=False, + listof=None, + priority=0, + class_type=None, + always_post_validate=False, + alias=None, + static=False, + ): + + """ + :class:`Attribute` specifies constraints for attributes of objects which + derive from playbook data. The attributes of the object are basically + a schema for the yaml playbook. + + :kwarg isa: The type of the attribute. Allowable values are a string + representation of any yaml basic datatype, python class, or percent. + (Enforced at post-validation time). + :kwarg private: Not used at runtime. The docs playbook keyword dumper uses it to determine + that a keyword should not be documented. mpdehaan had plans to remove attributes marked + private from the ds so they would not have been available at all. + :kwarg default: Default value if unspecified in the YAML document. + :kwarg required: Whether or not the YAML document must contain this field. + If the attribute is None when post-validated, an error will be raised. + :kwarg listof: If isa is set to "list", this can optionally be set to + ensure that all elements in the list are of the given type. Valid + values here are the same as those for isa. + :kwarg priority: The order in which the fields should be parsed. Generally + this does not need to be set, it is for rare situations where another + field depends on the fact that another field was parsed first. + :kwarg class_type: If isa is set to "class", this can be optionally set to + a class (not a string name). 
The YAML data for this field will be + passed to the __init__ method of that class during post validation and + the field will be an instance of that class. + :kwarg always_post_validate: Controls whether a field should be post + validated or not (default: False). + :kwarg alias: An alias to use for the attribute name, for situations where + the attribute name may conflict with a Python reserved word. + """ + + self.isa = isa + self.private = private + self.default = default + self.required = required + self.listof = listof + self.priority = priority + self.class_type = class_type + self.always_post_validate = always_post_validate + self.alias = alias + self.static = static + + if default is not None and self.isa in _CONTAINERS and not callable(default): + raise TypeError('defaults for FieldAttribute may not be mutable, please provide a callable instead') + + def __set_name__(self, owner, name): + self.name = name + + def __eq__(self, other): + return other.priority == self.priority + + def __ne__(self, other): + return other.priority != self.priority + + # NB: higher priority numbers sort first + + def __lt__(self, other): + return other.priority < self.priority + + def __gt__(self, other): + return other.priority > self.priority + + def __le__(self, other): + return other.priority <= self.priority + + def __ge__(self, other): + return other.priority >= self.priority + + def __get__(self, obj, obj_type=None): + method = f'_get_attr_{self.name}' + if hasattr(obj, method): + # NOTE this appears to be not used in the codebase, + # _get_attr_connection has been replaced by ConnectionFieldAttribute. + # Leaving it here for test_attr_method from + # test/units/playbook/test_base.py to pass and for backwards compat. 
+ if getattr(obj, '_squashed', False): + value = getattr(obj, f'_{self.name}', Sentinel) + else: + value = getattr(obj, method)() + else: + value = getattr(obj, f'_{self.name}', Sentinel) + + if value is Sentinel: + value = self.default + if callable(value): + value = value() + setattr(obj, f'_{self.name}', value) + + return value + + def __set__(self, obj, value): + setattr(obj, f'_{self.name}', value) + if self.alias is not None: + setattr(obj, f'_{self.alias}', value) + + # NOTE this appears to be not needed in the codebase, + # leaving it here for test_attr_int_del from + # test/units/playbook/test_base.py to pass. + def __delete__(self, obj): + delattr(obj, f'_{self.name}') + + +class NonInheritableFieldAttribute(Attribute): + ... + + +class FieldAttribute(Attribute): + def __init__(self, extend=False, prepend=False, **kwargs): + super().__init__(**kwargs) + + self.extend = extend + self.prepend = prepend + + def __get__(self, obj, obj_type=None): + if getattr(obj, '_squashed', False) or getattr(obj, '_finalized', False): + value = getattr(obj, f'_{self.name}', Sentinel) + else: + try: + value = obj._get_parent_attribute(self.name) + except AttributeError: + method = f'_get_attr_{self.name}' + if hasattr(obj, method): + # NOTE this appears to be not needed in the codebase, + # _get_attr_connection has been replaced by ConnectionFieldAttribute. + # Leaving it here for test_attr_method from + # test/units/playbook/test_base.py to pass and for backwards compat. 
+ if getattr(obj, '_squashed', False): + value = getattr(obj, f'_{self.name}', Sentinel) + else: + value = getattr(obj, method)() + else: + value = getattr(obj, f'_{self.name}', Sentinel) + + if value is Sentinel: + value = self.default + if callable(value): + value = value() + setattr(obj, f'_{self.name}', value) + + return value + + +class ConnectionFieldAttribute(FieldAttribute): + def __get__(self, obj, obj_type=None): + from ansible.module_utils.compat.paramiko import paramiko + from ansible.utils.ssh_functions import check_for_controlpersist + value = super().__get__(obj, obj_type) + + if value == 'smart': + value = 'ssh' + # see if SSH can support ControlPersist if not use paramiko + if not check_for_controlpersist('ssh') and paramiko is not None: + value = "paramiko" + + # if someone did `connection: persistent`, default it to using a persistent paramiko connection to avoid problems + elif value == 'persistent' and paramiko is not None: + value = 'paramiko' + + return value diff --git a/lib/ansible/playbook/base.py b/lib/ansible/playbook/base.py new file mode 100644 index 0000000..669aa0a --- /dev/null +++ b/lib/ansible/playbook/base.py @@ -0,0 +1,775 @@ +# Copyright: (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> +# Copyright: (c) 2017, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import itertools +import operator +import os + +from copy import copy as shallowcopy + +from jinja2.exceptions import UndefinedError + +from ansible import constants as C +from ansible import context +from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleAssertionError +from ansible.module_utils.six import string_types +from ansible.module_utils.parsing.convert_bool import boolean +from ansible.module_utils._text import to_text, to_native +from ansible.parsing.dataloader 
import DataLoader +from ansible.playbook.attribute import Attribute, FieldAttribute, ConnectionFieldAttribute, NonInheritableFieldAttribute +from ansible.plugins.loader import module_loader, action_loader +from ansible.utils.collection_loader._collection_finder import _get_collection_metadata, AnsibleCollectionRef +from ansible.utils.display import Display +from ansible.utils.sentinel import Sentinel +from ansible.utils.vars import combine_vars, isidentifier, get_unique_id + +display = Display() + + +def _validate_action_group_metadata(action, found_group_metadata, fq_group_name): + valid_metadata = { + 'extend_group': { + 'types': (list, string_types,), + 'errortype': 'list', + }, + } + + metadata_warnings = [] + + validate = C.VALIDATE_ACTION_GROUP_METADATA + metadata_only = isinstance(action, dict) and 'metadata' in action and len(action) == 1 + + if validate and not metadata_only: + found_keys = ', '.join(sorted(list(action))) + metadata_warnings.append("The only expected key is metadata, but got keys: {keys}".format(keys=found_keys)) + elif validate: + if found_group_metadata: + metadata_warnings.append("The group contains multiple metadata entries.") + if not isinstance(action['metadata'], dict): + metadata_warnings.append("The metadata is not a dictionary. 
Got {metadata}".format(metadata=action['metadata'])) + else: + unexpected_keys = set(action['metadata'].keys()) - set(valid_metadata.keys()) + if unexpected_keys: + metadata_warnings.append("The metadata contains unexpected keys: {0}".format(', '.join(unexpected_keys))) + unexpected_types = [] + for field, requirement in valid_metadata.items(): + if field not in action['metadata']: + continue + value = action['metadata'][field] + if not isinstance(value, requirement['types']): + unexpected_types.append("%s is %s (expected type %s)" % (field, value, requirement['errortype'])) + if unexpected_types: + metadata_warnings.append("The metadata contains unexpected key types: {0}".format(', '.join(unexpected_types))) + if metadata_warnings: + metadata_warnings.insert(0, "Invalid metadata was found for action_group {0} while loading module_defaults.".format(fq_group_name)) + display.warning(" ".join(metadata_warnings)) + + +class FieldAttributeBase: + + @classmethod + @property + def fattributes(cls): + # FIXME is this worth caching? 
+ fattributes = {} + for class_obj in reversed(cls.__mro__): + for name, attr in list(class_obj.__dict__.items()): + if not isinstance(attr, Attribute): + continue + fattributes[name] = attr + if attr.alias: + setattr(class_obj, attr.alias, attr) + fattributes[attr.alias] = attr + return fattributes + + def __init__(self): + + # initialize the data loader and variable manager, which will be provided + # later when the object is actually loaded + self._loader = None + self._variable_manager = None + + # other internal params + self._validated = False + self._squashed = False + self._finalized = False + + # every object gets a random uuid: + self._uuid = get_unique_id() + + # init vars, avoid using defaults in field declaration as it lives across plays + self.vars = dict() + + @property + def finalized(self): + return self._finalized + + def dump_me(self, depth=0): + ''' this is never called from production code, it is here to be used when debugging as a 'complex print' ''' + if depth == 0: + display.debug("DUMPING OBJECT ------------------------------------------------------") + display.debug("%s- %s (%s, id=%s)" % (" " * depth, self.__class__.__name__, self, id(self))) + if hasattr(self, '_parent') and self._parent: + self._parent.dump_me(depth + 2) + dep_chain = self._parent.get_dep_chain() + if dep_chain: + for dep in dep_chain: + dep.dump_me(depth + 2) + if hasattr(self, '_play') and self._play: + self._play.dump_me(depth + 2) + + def preprocess_data(self, ds): + ''' infrequently used method to do some pre-processing of legacy terms ''' + return ds + + def load_data(self, ds, variable_manager=None, loader=None): + ''' walk the input datastructure and assign any values ''' + + if ds is None: + raise AnsibleAssertionError('ds (%s) should not be None but it is.' 
% ds) + + # cache the datastructure internally + setattr(self, '_ds', ds) + + # the variable manager class is used to manage and merge variables + # down to a single dictionary for reference in templating, etc. + self._variable_manager = variable_manager + + # the data loader class is used to parse data from strings and files + if loader is not None: + self._loader = loader + else: + self._loader = DataLoader() + + # call the preprocess_data() function to massage the data into + # something we can more easily parse, and then call the validation + # function on it to ensure there are no incorrect key values + ds = self.preprocess_data(ds) + self._validate_attributes(ds) + + # Walk all attributes in the class. We sort them based on their priority + # so that certain fields can be loaded before others, if they are dependent. + for name, attr in sorted(self.fattributes.items(), key=operator.itemgetter(1)): + # copy the value over unless a _load_field method is defined + if name in ds: + method = getattr(self, '_load_%s' % name, None) + if method: + setattr(self, name, method(name, ds[name])) + else: + setattr(self, name, ds[name]) + + # run early, non-critical validation + self.validate() + + # return the constructed object + return self + + def get_ds(self): + try: + return getattr(self, '_ds') + except AttributeError: + return None + + def get_loader(self): + return self._loader + + def get_variable_manager(self): + return self._variable_manager + + def _post_validate_debugger(self, attr, value, templar): + value = templar.template(value) + valid_values = frozenset(('always', 'on_failed', 'on_unreachable', 'on_skipped', 'never')) + if value and isinstance(value, string_types) and value not in valid_values: + raise AnsibleParserError("'%s' is not a valid value for debugger. 
Must be one of %s" % (value, ', '.join(valid_values)), obj=self.get_ds()) + return value + + def _validate_attributes(self, ds): + ''' + Ensures that there are no keys in the datastructure which do + not map to attributes for this object. + ''' + + valid_attrs = frozenset(self.fattributes) + for key in ds: + if key not in valid_attrs: + raise AnsibleParserError("'%s' is not a valid attribute for a %s" % (key, self.__class__.__name__), obj=ds) + + def validate(self, all_vars=None): + ''' validation that is done at parse time, not load time ''' + all_vars = {} if all_vars is None else all_vars + + if not self._validated: + # walk all fields in the object + for (name, attribute) in self.fattributes.items(): + # run validator only if present + method = getattr(self, '_validate_%s' % name, None) + if method: + method(attribute, name, getattr(self, name)) + else: + # and make sure the attribute is of the type it should be + value = getattr(self, f'_{name}', Sentinel) + if value is not None: + if attribute.isa == 'string' and isinstance(value, (list, dict)): + raise AnsibleParserError( + "The field '%s' is supposed to be a string type," + " however the incoming data structure is a %s" % (name, type(value)), obj=self.get_ds() + ) + + self._validated = True + + def _load_module_defaults(self, name, value): + if value is None: + return + + if not isinstance(value, list): + value = [value] + + validated_module_defaults = [] + for defaults_dict in value: + if not isinstance(defaults_dict, dict): + raise AnsibleParserError( + "The field 'module_defaults' is supposed to be a dictionary or list of dictionaries, " + "the keys of which must be static action, module, or group names. Only the values may contain " + "templates. 
For example: {'ping': \"{{ ping_defaults }}\"}" + ) + + validated_defaults_dict = {} + for defaults_entry, defaults in defaults_dict.items(): + # module_defaults do not use the 'collections' keyword, so actions and + # action_groups that are not fully qualified are part of the 'ansible.legacy' + # collection. Update those entries here, so module_defaults contains + # fully qualified entries. + if defaults_entry.startswith('group/'): + group_name = defaults_entry.split('group/')[-1] + + # The resolved action_groups cache is associated saved on the current Play + if self.play is not None: + group_name, dummy = self._resolve_group(group_name) + + defaults_entry = 'group/' + group_name + validated_defaults_dict[defaults_entry] = defaults + + else: + if len(defaults_entry.split('.')) < 3: + defaults_entry = 'ansible.legacy.' + defaults_entry + + resolved_action = self._resolve_action(defaults_entry) + if resolved_action: + validated_defaults_dict[resolved_action] = defaults + + # If the defaults_entry is an ansible.legacy plugin, these defaults + # are inheritable by the 'ansible.builtin' subset, but are not + # required to exist. 
+ if defaults_entry.startswith('ansible.legacy.'): + resolved_action = self._resolve_action( + defaults_entry.replace('ansible.legacy.', 'ansible.builtin.'), + mandatory=False + ) + if resolved_action: + validated_defaults_dict[resolved_action] = defaults + + validated_module_defaults.append(validated_defaults_dict) + + return validated_module_defaults + + @property + def play(self): + if hasattr(self, '_play'): + play = self._play + elif hasattr(self, '_parent') and hasattr(self._parent, '_play'): + play = self._parent._play + else: + play = self + + if play.__class__.__name__ != 'Play': + # Should never happen, but handle gracefully by returning None, just in case + return None + + return play + + def _resolve_group(self, fq_group_name, mandatory=True): + if not AnsibleCollectionRef.is_valid_fqcr(fq_group_name): + collection_name = 'ansible.builtin' + fq_group_name = collection_name + '.' + fq_group_name + else: + collection_name = '.'.join(fq_group_name.split('.')[0:2]) + + # Check if the group has already been resolved and cached + if fq_group_name in self.play._group_actions: + return fq_group_name, self.play._group_actions[fq_group_name] + + try: + action_groups = _get_collection_metadata(collection_name).get('action_groups', {}) + except ValueError: + if not mandatory: + display.vvvvv("Error loading module_defaults: could not resolve the module_defaults group %s" % fq_group_name) + return fq_group_name, [] + + raise AnsibleParserError("Error loading module_defaults: could not resolve the module_defaults group %s" % fq_group_name) + + # The collection may or may not use the fully qualified name + # Don't fail if the group doesn't exist in the collection + resource_name = fq_group_name.split(collection_name + '.')[-1] + action_group = action_groups.get( + fq_group_name, + action_groups.get(resource_name) + ) + if action_group is None: + if not mandatory: + display.vvvvv("Error loading module_defaults: could not resolve the module_defaults group %s" % 
fq_group_name) + return fq_group_name, [] + raise AnsibleParserError("Error loading module_defaults: could not resolve the module_defaults group %s" % fq_group_name) + + resolved_actions = [] + include_groups = [] + + found_group_metadata = False + for action in action_group: + # Everything should be a string except the metadata entry + if not isinstance(action, string_types): + _validate_action_group_metadata(action, found_group_metadata, fq_group_name) + + if isinstance(action['metadata'], dict): + found_group_metadata = True + + include_groups = action['metadata'].get('extend_group', []) + if isinstance(include_groups, string_types): + include_groups = [include_groups] + if not isinstance(include_groups, list): + # Bad entries may be a warning above, but prevent tracebacks by setting it back to the acceptable type. + include_groups = [] + continue + + # The collection may or may not use the fully qualified name. + # If not, it's part of the current collection. + if not AnsibleCollectionRef.is_valid_fqcr(action): + action = collection_name + '.' + action + resolved_action = self._resolve_action(action, mandatory=False) + if resolved_action: + resolved_actions.append(resolved_action) + + for action in resolved_actions: + if action not in self.play._action_groups: + self.play._action_groups[action] = [] + self.play._action_groups[action].append(fq_group_name) + + self.play._group_actions[fq_group_name] = resolved_actions + + # Resolve extended groups last, after caching the group in case they recursively refer to each other + for include_group in include_groups: + if not AnsibleCollectionRef.is_valid_fqcr(include_group): + include_group = collection_name + '.' 
+ include_group + + dummy, group_actions = self._resolve_group(include_group, mandatory=False) + + for action in group_actions: + if action not in self.play._action_groups: + self.play._action_groups[action] = [] + self.play._action_groups[action].append(fq_group_name) + + self.play._group_actions[fq_group_name].extend(group_actions) + resolved_actions.extend(group_actions) + + return fq_group_name, resolved_actions + + def _resolve_action(self, action_name, mandatory=True): + context = module_loader.find_plugin_with_context(action_name) + if context.resolved and not context.action_plugin: + prefer = action_loader.find_plugin_with_context(action_name) + if prefer.resolved: + context = prefer + elif not context.resolved: + context = action_loader.find_plugin_with_context(action_name) + + if context.resolved: + return context.resolved_fqcn + if mandatory: + raise AnsibleParserError("Could not resolve action %s in module_defaults" % action_name) + display.vvvvv("Could not resolve action %s in module_defaults" % action_name) + + def squash(self): + ''' + Evaluates all attributes and sets them to the evaluated version, + so that all future accesses of attributes do not need to evaluate + parent attributes. + ''' + if not self._squashed: + for name in self.fattributes: + setattr(self, name, getattr(self, name)) + self._squashed = True + + def copy(self): + ''' + Create a copy of this object and return it. + ''' + + try: + new_me = self.__class__() + except RuntimeError as e: + raise AnsibleError("Exceeded maximum object depth. 
This may have been caused by excessive role recursion", orig_exc=e) + + for name in self.fattributes: + setattr(new_me, name, shallowcopy(getattr(self, f'_{name}', Sentinel))) + + new_me._loader = self._loader + new_me._variable_manager = self._variable_manager + new_me._validated = self._validated + new_me._finalized = self._finalized + new_me._uuid = self._uuid + + # if the ds value was set on the object, copy it to the new copy too + if hasattr(self, '_ds'): + new_me._ds = self._ds + + return new_me + + def get_validated_value(self, name, attribute, value, templar): + if attribute.isa == 'string': + value = to_text(value) + elif attribute.isa == 'int': + value = int(value) + elif attribute.isa == 'float': + value = float(value) + elif attribute.isa == 'bool': + value = boolean(value, strict=True) + elif attribute.isa == 'percent': + # special value, which may be an integer or float + # with an optional '%' at the end + if isinstance(value, string_types) and '%' in value: + value = value.replace('%', '') + value = float(value) + elif attribute.isa == 'list': + if value is None: + value = [] + elif not isinstance(value, list): + value = [value] + if attribute.listof is not None: + for item in value: + if not isinstance(item, attribute.listof): + raise AnsibleParserError("the field '%s' should be a list of %s, " + "but the item '%s' is a %s" % (name, attribute.listof, item, type(item)), obj=self.get_ds()) + elif attribute.required and attribute.listof == string_types: + if item is None or item.strip() == "": + raise AnsibleParserError("the field '%s' is required, and cannot have empty values" % (name,), obj=self.get_ds()) + elif attribute.isa == 'set': + if value is None: + value = set() + elif not isinstance(value, (list, set)): + if isinstance(value, string_types): + value = value.split(',') + else: + # Making a list like this handles strings of + # text and bytes properly + value = [value] + if not isinstance(value, set): + value = set(value) + elif 
    def set_to_context(self, name):
        ''' set to parent inherited value or Sentinel as appropriate'''

        attribute = self.fattributes[name]
        if isinstance(attribute, NonInheritableFieldAttribute):
            # setting to sentinel will trigger 'default/default()' on getter
            setattr(self, name, Sentinel)
        else:
            try:
                setattr(self, name, self._get_parent_attribute(name, omit=True))
            except AttributeError:
                # mostly playcontext as only tasks/handlers/blocks really resolve parent
                setattr(self, name, Sentinel)

    def post_validate(self, templar):
        '''
        we can't tell that everything is of the right type until we have
        all the variables.  Run basic types (from isa) as well as
        any _post_validate_<foo> functions.
        '''

        # save the omit value for later checking; it is the magic placeholder
        # users assign to a keyword to mean "pretend I never set this"
        omit_value = templar.available_variables.get('omit')

        for (name, attribute) in self.fattributes.items():
            if attribute.static:
                value = getattr(self, name)

                # we don't template 'vars' but allow template as values for later use
                if name not in ('vars',) and templar.is_template(value):
                    display.warning('"%s" is not templatable, but we found: %s, '
                                    'it will not be templated and will be used "as is".' % (name, value))
                continue

            if getattr(self, name) is None:
                if not attribute.required:
                    continue
                else:
                    raise AnsibleParserError("the field '%s' is required but was not set" % name)
            elif not attribute.always_post_validate and self.__class__.__name__ not in ('Task', 'Handler', 'PlayContext'):
                # Intermediate objects like Play() won't have their fields validated by
                # default, as their values are often inherited by other objects and validated
                # later, so we don't want them to fail out early
                continue

            try:
                # Run the post-validator if present. These methods are responsible for
                # using the given templar to template the values, if required.
                method = getattr(self, '_post_validate_%s' % name, None)
                if method:
                    value = method(attribute, getattr(self, name), templar)
                elif attribute.isa == 'class':
                    # class-typed attributes validate themselves (see get_validated_value)
                    value = getattr(self, name)
                else:
                    # if the attribute contains a variable, template it now
                    value = templar.template(getattr(self, name))

                # If this evaluated to the omit value, set the value back to inherited by context
                # or default specified in the FieldAttribute and move on
                if omit_value is not None and value == omit_value:
                    self.set_to_context(name)
                    continue

                # and make sure the attribute is of the type it should be
                if value is not None:
                    value = self.get_validated_value(name, attribute, value, templar)

                # and assign the massaged value back to the attribute field
                setattr(self, name, value)
            except (TypeError, ValueError) as e:
                value = getattr(self, name)
                raise AnsibleParserError("the field '%s' has an invalid value (%s), and could not be converted to an %s."
                                         "The error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds(), orig_exc=e)
            except (AnsibleUndefinedVariable, UndefinedError) as e:
                # name == 'name' is excluded so an undefined task name does not
                # abort the run; it is resolved later for display purposes
                if templar._fail_on_undefined_errors and name != 'name':
                    if name == 'args':
                        msg = "The task includes an option with an undefined variable. The error was: %s" % (to_native(e))
                    else:
                        msg = "The field '%s' has an invalid value, which includes an undefined variable. The error was: %s" % (name, to_native(e))
                    raise AnsibleParserError(msg, obj=self.get_ds(), orig_exc=e)

        self._finalized = True

    def _load_vars(self, attr, ds):
        '''
        Vars in a play can be specified either as a dictionary directly, or
        as a list of dictionaries. If the later, this method will turn the
        list into a single dictionary.
        '''

        def _validate_variable_keys(ds):
            # every vars key must be a legal variable identifier
            for key in ds:
                if not isidentifier(key):
                    raise TypeError("'%s' is not a valid variable name" % key)

        try:
            if isinstance(ds, dict):
                _validate_variable_keys(ds)
                return combine_vars(self.vars, ds)
            elif isinstance(ds, list):
                # merge the list of dicts left-to-right on top of existing vars
                all_vars = self.vars
                for item in ds:
                    if not isinstance(item, dict):
                        raise ValueError
                    _validate_variable_keys(item)
                    all_vars = combine_vars(all_vars, item)
                return all_vars
            elif ds is None:
                return {}
            else:
                raise ValueError
        except ValueError as e:
            raise AnsibleParserError("Vars in a %s must be specified as a dictionary, or a list of dictionaries" % self.__class__.__name__,
                                     obj=ds, orig_exc=e)
        except TypeError as e:
            raise AnsibleParserError("Invalid variable name in vars specified for %s: %s" % (self.__class__.__name__, e), obj=ds, orig_exc=e)
+ ''' + + if not isinstance(value, list): + value = [value] + if not isinstance(new_value, list): + new_value = [new_value] + + # Due to where _extend_value may run for some attributes + # it is possible to end up with Sentinel in the list of values + # ensure we strip them + value = [v for v in value if v is not Sentinel] + new_value = [v for v in new_value if v is not Sentinel] + + if prepend: + combined = new_value + value + else: + combined = value + new_value + + return [i for i, _ in itertools.groupby(combined) if i is not None] + + def dump_attrs(self): + ''' + Dumps all attributes to a dictionary + ''' + attrs = {} + for (name, attribute) in self.fattributes.items(): + attr = getattr(self, name) + if attribute.isa == 'class' and hasattr(attr, 'serialize'): + attrs[name] = attr.serialize() + else: + attrs[name] = attr + return attrs + + def from_attrs(self, attrs): + ''' + Loads attributes from a dictionary + ''' + for (attr, value) in attrs.items(): + if attr in self.fattributes: + attribute = self.fattributes[attr] + if attribute.isa == 'class' and isinstance(value, dict): + obj = attribute.class_type() + obj.deserialize(value) + setattr(self, attr, obj) + else: + setattr(self, attr, value) + + # from_attrs is only used to create a finalized task + # from attrs from the Worker/TaskExecutor + # Those attrs are finalized and squashed in the TE + # and controller side use needs to reflect that + self._finalized = True + self._squashed = True + + def serialize(self): + ''' + Serializes the object derived from the base object into + a dictionary of values. This only serializes the field + attributes for the object, so this may need to be overridden + for any classes which wish to add additional items not stored + as field attributes. 
+ ''' + + repr = self.dump_attrs() + + # serialize the uuid field + repr['uuid'] = self._uuid + repr['finalized'] = self._finalized + repr['squashed'] = self._squashed + + return repr + + def deserialize(self, data): + ''' + Given a dictionary of values, load up the field attributes for + this object. As with serialize(), if there are any non-field + attribute data members, this method will need to be overridden + and extended. + ''' + + if not isinstance(data, dict): + raise AnsibleAssertionError('data (%s) should be a dict but is a %s' % (data, type(data))) + + for (name, attribute) in self.fattributes.items(): + if name in data: + setattr(self, name, data[name]) + else: + self.set_to_context(name) + + # restore the UUID field + setattr(self, '_uuid', data.get('uuid')) + self._finalized = data.get('finalized', False) + self._squashed = data.get('squashed', False) + + +class Base(FieldAttributeBase): + + name = NonInheritableFieldAttribute(isa='string', default='', always_post_validate=True) + + # connection/transport + connection = ConnectionFieldAttribute(isa='string', default=context.cliargs_deferred_get('connection')) + port = FieldAttribute(isa='int') + remote_user = FieldAttribute(isa='string', default=context.cliargs_deferred_get('remote_user')) + + # variables + vars = NonInheritableFieldAttribute(isa='dict', priority=100, static=True) + + # module default params + module_defaults = FieldAttribute(isa='list', extend=True, prepend=True) + + # flags and misc. 
settings + environment = FieldAttribute(isa='list', extend=True, prepend=True) + no_log = FieldAttribute(isa='bool') + run_once = FieldAttribute(isa='bool') + ignore_errors = FieldAttribute(isa='bool') + ignore_unreachable = FieldAttribute(isa='bool') + check_mode = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('check')) + diff = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('diff')) + any_errors_fatal = FieldAttribute(isa='bool', default=C.ANY_ERRORS_FATAL) + throttle = FieldAttribute(isa='int', default=0) + timeout = FieldAttribute(isa='int', default=C.TASK_TIMEOUT) + + # explicitly invoke a debugger on tasks + debugger = FieldAttribute(isa='string') + + # Privilege escalation + become = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('become')) + become_method = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_method')) + become_user = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_user')) + become_flags = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_flags')) + become_exe = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_exe')) + + # used to hold sudo/su stuff + DEPRECATED_ATTRIBUTES = [] # type: list[str] + + def get_path(self): + ''' return the absolute path of the playbook object and its line number ''' + + path = "" + try: + path = "%s:%s" % (self._ds._data_source, self._ds._line_number) + except AttributeError: + try: + path = "%s:%s" % (self._parent._play._ds._data_source, self._parent._play._ds._line_number) + except AttributeError: + pass + return path + + def get_dep_chain(self): + + if hasattr(self, '_parent') and self._parent: + return self._parent.get_dep_chain() + else: + return None + + def get_search_path(self): + ''' + Return the list of paths you should search for files, in order. + This follows role/playbook dependency chain. 
+ ''' + path_stack = [] + + dep_chain = self.get_dep_chain() + # inside role: add the dependency chain from current to dependent + if dep_chain: + path_stack.extend(reversed([x._role_path for x in dep_chain if hasattr(x, '_role_path')])) + + # add path of task itself, unless it is already in the list + task_dir = os.path.dirname(self.get_path()) + if task_dir not in path_stack: + path_stack.append(task_dir) + + return path_stack diff --git a/lib/ansible/playbook/block.py b/lib/ansible/playbook/block.py new file mode 100644 index 0000000..fabaf7f --- /dev/null +++ b/lib/ansible/playbook/block.py @@ -0,0 +1,446 @@ +# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
class Block(Base, Conditional, CollectionSearch, Taggable):

    # main block fields containing the task lists
    block = NonInheritableFieldAttribute(isa='list', default=list)
    rescue = NonInheritableFieldAttribute(isa='list', default=list)
    always = NonInheritableFieldAttribute(isa='list', default=list)

    # other fields for task compat
    notify = FieldAttribute(isa='list')
    delegate_to = FieldAttribute(isa='string')
    delegate_facts = FieldAttribute(isa='bool')

    # for future consideration? this would be functionally
    # similar to the 'else' clause for exceptions
    # otherwise = FieldAttribute(isa='list')

    def __init__(self, play=None, parent_block=None, role=None, task_include=None, use_handlers=False, implicit=False):
        self._play = play
        self._role = role
        self._dep_chain = None
        self._use_handlers = use_handlers
        self._implicit = implicit

        # a task include takes precedence over a parent block as our parent;
        # when neither is given the block has no parent
        self._parent = task_include or parent_block or None

        super(Block, self).__init__()

    def __repr__(self):
        return "BLOCK(uuid=%s)(id=%s)(parent=%s)" % (self._uuid, id(self), self._parent)

    def __eq__(self, other):
        '''object comparison based on _uuid'''
        return self._uuid == other._uuid

    def __ne__(self, other):
        '''object comparison based on _uuid'''
        return self._uuid != other._uuid

    def get_vars(self):
        '''
        Blocks do not store variables directly, however they may be a member
        of a role or task include which does, so return those if present.
        '''
        merged = {}
        if self._parent:
            merged |= self._parent.get_vars()
        merged |= self.vars.copy()
        return merged
+ ''' + + all_vars = {} + + if self._parent: + all_vars |= self._parent.get_vars() + + all_vars |= self.vars.copy() + + return all_vars + + @staticmethod + def load(data, play=None, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None): + implicit = not Block.is_block(data) + b = Block(play=play, parent_block=parent_block, role=role, task_include=task_include, use_handlers=use_handlers, implicit=implicit) + return b.load_data(data, variable_manager=variable_manager, loader=loader) + + @staticmethod + def is_block(ds): + is_block = False + if isinstance(ds, dict): + for attr in ('block', 'rescue', 'always'): + if attr in ds: + is_block = True + break + return is_block + + def preprocess_data(self, ds): + ''' + If a simple task is given, an implicit block for that single task + is created, which goes in the main portion of the block + ''' + + if not Block.is_block(ds): + if isinstance(ds, list): + return super(Block, self).preprocess_data(dict(block=ds)) + else: + return super(Block, self).preprocess_data(dict(block=[ds])) + + return super(Block, self).preprocess_data(ds) + + def _load_block(self, attr, ds): + try: + return load_list_of_tasks( + ds, + play=self._play, + block=self, + role=self._role, + task_include=None, + variable_manager=self._variable_manager, + loader=self._loader, + use_handlers=self._use_handlers, + ) + except AssertionError as e: + raise AnsibleParserError("A malformed block was encountered while loading a block", obj=self._ds, orig_exc=e) + + def _load_rescue(self, attr, ds): + try: + return load_list_of_tasks( + ds, + play=self._play, + block=self, + role=self._role, + task_include=None, + variable_manager=self._variable_manager, + loader=self._loader, + use_handlers=self._use_handlers, + ) + except AssertionError as e: + raise AnsibleParserError("A malformed block was encountered while loading rescue.", obj=self._ds, orig_exc=e) + + def _load_always(self, attr, ds): + try: + return 
load_list_of_tasks( + ds, + play=self._play, + block=self, + role=self._role, + task_include=None, + variable_manager=self._variable_manager, + loader=self._loader, + use_handlers=self._use_handlers, + ) + except AssertionError as e: + raise AnsibleParserError("A malformed block was encountered while loading always", obj=self._ds, orig_exc=e) + + def _validate_always(self, attr, name, value): + if value and not self.block: + raise AnsibleParserError("'%s' keyword cannot be used without 'block'" % name, obj=self._ds) + + _validate_rescue = _validate_always + + def get_dep_chain(self): + if self._dep_chain is None: + if self._parent: + return self._parent.get_dep_chain() + else: + return None + else: + return self._dep_chain[:] + + def copy(self, exclude_parent=False, exclude_tasks=False): + def _dupe_task_list(task_list, new_block): + new_task_list = [] + for task in task_list: + new_task = task.copy(exclude_parent=True) + if task._parent: + new_task._parent = task._parent.copy(exclude_tasks=True) + if task._parent == new_block: + # If task._parent is the same as new_block, just replace it + new_task._parent = new_block + else: + # task may not be a direct child of new_block, search for the correct place to insert new_block + cur_obj = new_task._parent + while cur_obj._parent and cur_obj._parent != new_block: + cur_obj = cur_obj._parent + + cur_obj._parent = new_block + else: + new_task._parent = new_block + new_task_list.append(new_task) + return new_task_list + + new_me = super(Block, self).copy() + new_me._play = self._play + new_me._use_handlers = self._use_handlers + + if self._dep_chain is not None: + new_me._dep_chain = self._dep_chain[:] + + new_me._parent = None + if self._parent and not exclude_parent: + new_me._parent = self._parent.copy(exclude_tasks=True) + + if not exclude_tasks: + new_me.block = _dupe_task_list(self.block or [], new_me) + new_me.rescue = _dupe_task_list(self.rescue or [], new_me) + new_me.always = _dupe_task_list(self.always or [], 
new_me) + + new_me._role = None + if self._role: + new_me._role = self._role + + new_me.validate() + return new_me + + def serialize(self): + ''' + Override of the default serialize method, since when we're serializing + a task we don't want to include the attribute list of tasks. + ''' + + data = dict() + for attr in self.fattributes: + if attr not in ('block', 'rescue', 'always'): + data[attr] = getattr(self, attr) + + data['dep_chain'] = self.get_dep_chain() + + if self._role is not None: + data['role'] = self._role.serialize() + if self._parent is not None: + data['parent'] = self._parent.copy(exclude_tasks=True).serialize() + data['parent_type'] = self._parent.__class__.__name__ + + return data + + def deserialize(self, data): + ''' + Override of the default deserialize method, to match the above overridden + serialize method + ''' + + # import is here to avoid import loops + from ansible.playbook.task_include import TaskInclude + from ansible.playbook.handler_task_include import HandlerTaskInclude + + # we don't want the full set of attributes (the task lists), as that + # would lead to a serialize/deserialize loop + for attr in self.fattributes: + if attr in data and attr not in ('block', 'rescue', 'always'): + setattr(self, attr, data.get(attr)) + + self._dep_chain = data.get('dep_chain', None) + + # if there was a serialized role, unpack it too + role_data = data.get('role') + if role_data: + r = Role() + r.deserialize(role_data) + self._role = r + + parent_data = data.get('parent') + if parent_data: + parent_type = data.get('parent_type') + if parent_type == 'Block': + p = Block() + elif parent_type == 'TaskInclude': + p = TaskInclude() + elif parent_type == 'HandlerTaskInclude': + p = HandlerTaskInclude() + p.deserialize(parent_data) + self._parent = p + self._dep_chain = self._parent.get_dep_chain() + + def set_loader(self, loader): + self._loader = loader + if self._parent: + self._parent.set_loader(loader) + elif self._role: + 
    def set_loader(self, loader):
        # propagate the loader to this block, its parent (or role) and
        # every role in the dependency chain
        self._loader = loader
        if self._parent:
            self._parent.set_loader(loader)
        elif self._role:
            self._role.set_loader(loader)

        dep_chain = self.get_dep_chain()
        if dep_chain:
            for dep in dep_chain:
                dep.set_loader(loader)

    def _get_parent_attribute(self, attr, omit=False):
        '''
        Generic logic to get the attribute or parent attribute for a block value.

        Resolution order: self (unless omit=True), then parent (block/include),
        then role and its dependency chain, then the play. extend-type
        attributes accumulate values along the chain instead of stopping at
        the first hit.
        '''
        fattr = self.fattributes[attr]

        extend = fattr.extend
        prepend = fattr.prepend

        try:
            # omit self, and only get parent values
            if omit:
                value = Sentinel
            else:
                value = getattr(self, f'_{attr}', Sentinel)

            # If parent is static, we can grab attrs from the parent
            # otherwise, defer to the grandparent
            if getattr(self._parent, 'statically_loaded', True):
                _parent = self._parent
            else:
                _parent = self._parent._parent

            if _parent and (value is Sentinel or extend):
                try:
                    if getattr(_parent, 'statically_loaded', True):
                        if hasattr(_parent, '_get_parent_attribute'):
                            parent_value = _parent._get_parent_attribute(attr)
                        else:
                            parent_value = getattr(_parent, f'_{attr}', Sentinel)
                        if extend:
                            value = self._extend_value(value, parent_value, prepend)
                        else:
                            value = parent_value
                except AttributeError:
                    pass
            if self._role and (value is Sentinel or extend):
                try:
                    parent_value = getattr(self._role, f'_{attr}', Sentinel)
                    if extend:
                        value = self._extend_value(value, parent_value, prepend)
                    else:
                        value = parent_value

                    dep_chain = self.get_dep_chain()
                    if dep_chain and (value is Sentinel or extend):
                        # walk the chain from the innermost dependency outwards
                        dep_chain.reverse()
                        for dep in dep_chain:
                            dep_value = getattr(dep, f'_{attr}', Sentinel)
                            if extend:
                                value = self._extend_value(value, dep_value, prepend)
                            else:
                                value = dep_value

                            if value is not Sentinel and not extend:
                                break
                except AttributeError:
                    pass
            if self._play and (value is Sentinel or extend):
                try:
                    play_value = getattr(self._play, f'_{attr}', Sentinel)
                    if play_value is not Sentinel:
                        if extend:
                            value = self._extend_value(value, play_value, prepend)
                        else:
                            value = play_value
                except AttributeError:
                    pass
        except KeyError:
            pass

        return value

    def filter_tagged_tasks(self, all_vars):
        '''
        Creates a new block, with task lists filtered based on the tags.
        '''

        def evaluate_and_append_task(target):
            # keep a task if it is an implicit meta task, a matching include,
            # or matches the play's only/skip tags; recurse into sub-blocks
            tmp_list = []
            for task in target:
                if isinstance(task, Block):
                    filtered_block = evaluate_block(task)
                    if filtered_block.has_tasks():
                        tmp_list.append(filtered_block)
                elif ((task.action in C._ACTION_META and task.implicit) or
                        (task.action in C._ACTION_INCLUDE and task.evaluate_tags([], self._play.skip_tags, all_vars=all_vars)) or
                        task.evaluate_tags(self._play.only_tags, self._play.skip_tags, all_vars=all_vars)):
                    tmp_list.append(task)
            return tmp_list

        def evaluate_block(block):
            new_block = block.copy(exclude_parent=True, exclude_tasks=True)
            new_block._parent = block._parent
            new_block.block = evaluate_and_append_task(block.block)
            new_block.rescue = evaluate_and_append_task(block.rescue)
            new_block.always = evaluate_and_append_task(block.always)
            return new_block

        return evaluate_block(self)

    def get_tasks(self):
        # flatten every task from block/rescue/always, recursing into
        # nested blocks
        def evaluate_and_append_task(target):
            tmp_list = []
            for task in target:
                if isinstance(task, Block):
                    tmp_list.extend(evaluate_block(task))
                else:
                    tmp_list.append(task)
            return tmp_list

        def evaluate_block(block):
            rv = evaluate_and_append_task(block.block)
            rv.extend(evaluate_and_append_task(block.rescue))
            rv.extend(evaluate_and_append_task(block.always))
            return rv

        return evaluate_block(self)

    def has_tasks(self):
        # True when any of the three task lists is non-empty
        return len(self.block) > 0 or len(self.rescue) > 0 or len(self.always) > 0

    def get_include_params(self):
        # blocks carry no include params of their own; defer to the parent
        if self._parent:
            return self._parent.get_include_params()
        else:
            return dict()
Only Block objects in + the chain check the statically_loaded value of the parent. + ''' + from ansible.playbook.task_include import TaskInclude + if self._parent: + if isinstance(self._parent, TaskInclude) and not self._parent.statically_loaded: + return False + return self._parent.all_parents_static() + + return True + + def get_first_parent_include(self): + from ansible.playbook.task_include import TaskInclude + if self._parent: + if isinstance(self._parent, TaskInclude): + return self._parent + return self._parent.get_first_parent_include() + return None diff --git a/lib/ansible/playbook/collectionsearch.py b/lib/ansible/playbook/collectionsearch.py new file mode 100644 index 0000000..2980093 --- /dev/null +++ b/lib/ansible/playbook/collectionsearch.py @@ -0,0 +1,63 @@ +# Copyright: (c) 2019, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible.module_utils.six import string_types +from ansible.playbook.attribute import FieldAttribute +from ansible.utils.collection_loader import AnsibleCollectionConfig +from ansible.template import is_template +from ansible.utils.display import Display + +from jinja2.nativetypes import NativeEnvironment + +display = Display() + + +def _ensure_default_collection(collection_list=None): + default_collection = AnsibleCollectionConfig.default_collection + + # Will be None when used as the default + if collection_list is None: + collection_list = [] + + # FIXME: exclude role tasks? 
+ if default_collection and default_collection not in collection_list: + collection_list.insert(0, default_collection) + + # if there's something in the list, ensure that builtin or legacy is always there too + if collection_list and 'ansible.builtin' not in collection_list and 'ansible.legacy' not in collection_list: + collection_list.append('ansible.legacy') + + return collection_list + + +class CollectionSearch: + + # this needs to be populated before we can resolve tasks/roles/etc + collections = FieldAttribute(isa='list', listof=string_types, priority=100, default=_ensure_default_collection, + always_post_validate=True, static=True) + + def _load_collections(self, attr, ds): + # We are always a mixin with Base, so we can validate this untemplated + # field early on to guarantee we are dealing with a list. + ds = self.get_validated_value('collections', self.fattributes.get('collections'), ds, None) + + # this will only be called if someone specified a value; call the shared value + _ensure_default_collection(collection_list=ds) + + if not ds: # don't return an empty collection list, just return None + return None + + # This duplicates static attr checking logic from post_validate() + # because if the user attempts to template a collection name, it may + # error before it ever gets to the post_validate() warning (e.g. trying + # to import a role from the collection). + env = NativeEnvironment() + for collection_name in ds: + if is_template(collection_name, env): + display.warning('"collections" is not templatable, but we found: %s, ' + 'it will not be templated and will be used "as is".' 
DEFINED_REGEX = re.compile(r'(hostvars\[.+\]|[\w_]+)\s+(not\s+is|is|is\s+not)\s+(defined|undefined)')
LOOKUP_REGEX = re.compile(r'lookup\s*\(')
VALID_VAR_REGEX = re.compile("^[_A-Za-z][_a-zA-Z0-9]*$")


class Conditional:

    '''
    This is a mix-in class, to be used with Base to allow the object
    to be run conditionally when a condition is met or skipped.
    '''

    when = FieldAttribute(isa='list', default=list, extend=True, prepend=True)

    def __init__(self, loader=None):
        # when used directly, this class needs a loader, but we want to
        # make sure we don't trample on the existing one if this class
        # is used as a mix-in with a playbook base class
        if not hasattr(self, '_loader'):
            if loader is None:
                raise AnsibleError("a loader must be specified when using Conditional() directly")
            self._loader = loader
        super(Conditional, self).__init__()

    def _validate_when(self, attr, name, value):
        # normalize a scalar conditional into a single-element list
        if not isinstance(value, list):
            setattr(self, name, [value])

    def extract_defined_undefined(self, conditional):
        '''Return every (var, logic, state) triple for "is (not) defined"
        style tests found in the conditional string.'''
        results = []

        remainder = conditional
        match = DEFINED_REGEX.search(remainder)
        while match:
            results.append(match.groups())
            remainder = remainder[match.end():]
            match = DEFINED_REGEX.search(remainder)

        return results

    def evaluate_conditional(self, templar, all_vars):
        '''
        Loops through the conditionals set on this object, returning
        False if any of them evaluate as such.
        '''

        # since this is a mix-in, it may not have an underlying datastructure
        # associated with it, so we pull it out now in case we need it for
        # error reporting below
        ds = getattr(self, '_ds', None)

        result = True
        try:
            for conditional in self.when:
                # empty/None conditions count as true; booleans pass through;
                # anything else gets the full templated evaluation
                if conditional is None or conditional == '':
                    res = True
                elif isinstance(conditional, bool):
                    res = conditional
                else:
                    res = self._check_conditional(conditional, templar, all_vars)

                # only update if still true, preserve false
                if result:
                    result = res

                display.debug("Evaluated conditional (%s): %s" % (conditional, res))
                if not result:
                    break

        except Exception as e:
            raise AnsibleError("The conditional check '%s' failed. The error was: %s" % (to_native(conditional), to_native(e)), obj=ds)

        return result
    def _check_conditional(self, conditional, templar, all_vars):
        '''
        This method does the low-level evaluation of each conditional
        set on this object, using jinja2 to wrap the conditionals for
        evaluation.
        '''

        original = conditional

        if templar.is_template(conditional):
            display.warning('conditional statements should not include jinja2 '
                            'templating delimiters such as {{ }} or {%% %%}. '
                            'Found: %s' % conditional)

        # make sure the templar is using the variables specified with this method
        templar.available_variables = all_vars

        try:
            # if the conditional is "unsafe", disable lookups
            disable_lookups = hasattr(conditional, '__UNSAFE__')
            conditional = templar.template(conditional, disable_lookups=disable_lookups)

            # a non-string (or empty) result is returned as-is to the caller
            if not isinstance(conditional, text_type) or conditional == "":
                return conditional

            # update the lookups flag, as the string returned above may now be unsafe
            # and we don't want future templating calls to do unsafe things
            disable_lookups |= hasattr(conditional, '__UNSAFE__')

            # First, we do some low-level jinja2 parsing involving the AST format of the
            # statement to ensure we don't do anything unsafe (using the disable_lookup flag above)
            class CleansingNodeVisitor(ast.NodeVisitor):
                def generic_visit(self, node, inside_call=False, inside_yield=False):
                    if isinstance(node, ast.Call):
                        inside_call = True
                    elif isinstance(node, ast.Yield):
                        inside_yield = True
                    elif isinstance(node, ast.Str):
                        if disable_lookups:
                            if inside_call and node.s.startswith("__"):
                                # calling things with a dunder is generally bad at this point...
                                raise AnsibleError(
                                    "Invalid access found in the conditional: '%s'" % conditional
                                )
                            elif inside_yield:
                                # we're inside a yield, so recursively parse and traverse the AST
                                # of the result to catch forbidden syntax from executing
                                parsed = ast.parse(node.s, mode='exec')
                                cnv = CleansingNodeVisitor()
                                cnv.visit(parsed)
                    # iterate over all child nodes
                    for child_node in ast.iter_child_nodes(node):
                        self.generic_visit(
                            child_node,
                            inside_call=inside_call,
                            inside_yield=inside_yield
                        )
            try:
                # compile the conditional through jinja2 into python source,
                # then vet that source with the visitor above
                res = templar.environment.parse(conditional, None, None)
                res = generate(res, templar.environment, None, None)
                parsed = ast.parse(res, mode='exec')

                cnv = CleansingNodeVisitor()
                cnv.visit(parsed)
            except Exception as e:
                raise AnsibleError("Invalid conditional detected: %s" % to_native(e))

            # and finally we generate and template the presented string and look at the resulting string
            # NOTE The spaces around True and False are intentional to short-circuit literal_eval for
            # jinja2_native=False and avoid its expensive calls.
            presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional
            val = templar.template(presented, disable_lookups=disable_lookups).strip()
            if val == "True":
                return True
            elif val == "False":
                return False
            else:
                raise AnsibleError("unable to evaluate conditional: %s" % original)
        except (AnsibleUndefinedVariable, UndefinedError) as e:
            # the templating failed, meaning most likely a variable was undefined. If we happened
            # to be looking for an undefined variable, return True, otherwise fail
            try:
                # first we extract the variable name from the error message
                var_name = re.compile(r"'(hostvars\[.+\]|[\w_]+)' is undefined").search(str(e)).groups()[0]
                # next we extract all defined/undefined tests from the conditional string
                def_undef = self.extract_defined_undefined(conditional)
                # then we loop through these, comparing the error variable name against
                # each def/undef test we found above. If there is a match, we determine
                # whether the logic/state mean the variable should exist or not and return
                # the corresponding True/False
                for (du_var, logic, state) in def_undef:
                    # when we compare the var names, normalize quotes because something
                    # like hostvars['foo'] may be tested against hostvars["foo"]
                    if var_name.replace("'", '"') == du_var.replace("'", '"'):
                        # the should exist is a xor test between a negation in the logic portion
                        # against the state (defined or undefined)
                        should_exist = ('not' in logic) != (state == 'defined')
                        if should_exist:
                            return False
                        else:
                            return True
                # as nothing above matched the failed var name, re-raise here to
                # trigger the AnsibleUndefinedVariable exception again below
                raise
            except Exception:
                raise AnsibleUndefinedVariable("error while evaluating conditional (%s): %s" % (original, e))
class Handler(Task):
    """A task that runs only when another task notifies it, by name or by topic."""

    # extra topic names (beyond the handler's own name) that notifications match on
    listen = FieldAttribute(isa='list', default=list, listof=string_types, static=True)

    def __init__(self, block=None, role=None, task_include=None):
        # hosts whose notification of this handler is still pending
        self.notified_hosts = []

        # flag consumed elsewhere — presumably marks whether get_name() output
        # is already cached/final; not used within this class. TODO confirm.
        self.cached_name = False

        super(Handler, self).__init__(block=block, role=role, task_include=task_include)

    def __repr__(self):
        """Human-readable representation, e.g. ``HANDLER: restart nginx``."""
        return "HANDLER: %s" % self.get_name()

    @staticmethod
    def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
        """Build a Handler from its parsed YAML datastructure."""
        new_handler = Handler(block=block, role=role, task_include=task_include)
        return new_handler.load_data(data, variable_manager=variable_manager, loader=loader)

    def notify_host(self, host):
        """Record a notification for *host*; return True only if it was new."""
        if self.is_host_notified(host):
            return False
        self.notified_hosts.append(host)
        return True

    def remove_host(self, host):
        """Forget any pending notification for *host*."""
        self.notified_hosts = [candidate for candidate in self.notified_hosts if candidate != host]

    def is_host_notified(self, host):
        """Return True when *host* currently has a pending notification."""
        return host in self.notified_hosts

    def serialize(self):
        """Serialize like a Task, tagging the result so it round-trips as a handler."""
        data = super(Handler, self).serialize()
        data['is_handler'] = True
        return data
class HandlerTaskInclude(Handler, TaskInclude):
    """An include/import task that lives in a play's handlers section."""

    # handlers additionally accept the 'listen' keyword on their includes
    VALID_INCLUDE_KEYWORDS = TaskInclude.VALID_INCLUDE_KEYWORDS.union(('listen',))

    @staticmethod
    def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
        """Build a HandlerTaskInclude from its parsed YAML datastructure."""
        task = HandlerTaskInclude(block=block, role=role, task_include=task_include)
        loaded = task.load_data(data, variable_manager=variable_manager, loader=loader)
        # check_options() — presumably validates that only include-legal
        # keywords were used against VALID_INCLUDE_KEYWORDS; TODO confirm.
        return task.check_options(loaded, data)
def load_list_of_blocks(ds, play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of mixed task/block data (parsed from YAML),
    return a list of Block() objects, where implicit blocks
    are created for each bare Task.

    :param ds: list of block/task datastructures, or None
    :param play: the Play this content belongs to
    :param parent_block: enclosing Block, if any
    :param role: owning Role, if any
    :param task_include: TaskInclude that pulled this content in, if any
    :param use_handlers: True when loading a handlers section
    :param variable_manager: variable manager passed through to Block.load
    :param loader: loader passed through to Block.load
    :raises AnsibleAssertionError: if ds is neither a list nor None
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block

    if not isinstance(ds, (list, type(None))):
        raise AnsibleAssertionError('%s should be a list or None but is %s' % (ds, type(ds)))

    block_list = []
    if ds:
        # NOTE: 'i' is advanced both by the for-loop and manually inside the
        # while-loop below; next(count, None) keeps the two in sync so already
        # consumed entries are not revisited.
        count = iter(range(len(ds)))
        for i in count:
            block_ds = ds[i]
            # Implicit blocks are created by bare tasks listed in a play without
            # an explicit block statement. If we have two implicit blocks in a row,
            # squash them down to a single block to save processing time later.
            implicit_blocks = []
            while block_ds is not None and not Block.is_block(block_ds):
                implicit_blocks.append(block_ds)
                i += 1
                # Advance the iterator, so we don't repeat
                next(count, None)
                try:
                    block_ds = ds[i]
                except IndexError:
                    block_ds = None

            # Loop both implicit blocks and block_ds as block_ds is the next in the list
            for b in (implicit_blocks, block_ds):
                if b:
                    block_list.append(
                        Block.load(
                            b,
                            play=play,
                            parent_block=parent_block,
                            role=role,
                            task_include=task_include,
                            use_handlers=use_handlers,
                            variable_manager=variable_manager,
                            loader=loader,
                        )
                    )

    return block_list
def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.

    Static includes/imports (import_tasks, import_role, and static 'include')
    are resolved and expanded inline here; dynamic ones are returned as task
    objects to be expanded at runtime.

    :param ds: list of task datastructures (each must be a dict)
    :param play: the Play this content belongs to
    :param block: enclosing Block, if any
    :param role: owning Role, if any
    :param task_include: TaskInclude that pulled this content in, if any
    :param use_handlers: True when loading a handlers section
    :param variable_manager: variable manager used for templating include paths
    :param loader: loader used for file access/parsing
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task
    from ansible.playbook.task_include import TaskInclude
    from ansible.playbook.role_include import IncludeRole
    from ansible.playbook.handler_task_include import HandlerTaskInclude
    from ansible.template import Templar
    from ansible.utils.plugin_docs import get_versioned_doclink

    if not isinstance(ds, list):
        raise AnsibleAssertionError('The ds (%s) should be a list but was a %s' % (ds, type(ds)))

    task_list = []
    for task_ds in ds:
        if not isinstance(task_ds, dict):
            raise AnsibleAssertionError('The ds (%s) should be a dict but was a %s' % (ds, type(ds)))

        if 'block' in task_ds:
            if use_handlers:
                raise AnsibleParserError("Using a block as a handler is not supported.", obj=task_ds)
            t = Block.load(
                task_ds,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            task_list.append(t)
        else:
            # parsed only to classify the action below; args/delegate_to are
            # not used here (they are re-derived when the task itself loads)
            args_parser = ModuleArgsParser(task_ds)
            try:
                (action, args, delegate_to) = args_parser.parse(skip_action_validation=True)
            except AnsibleParserError as e:
                # if the raises exception was created with obj=ds args, then it includes the detail
                # so we dont need to add it so we can just re raise.
                if e.obj:
                    raise
                # But if it wasn't, we can add the yaml object now to get more detail
                raise AnsibleParserError(to_native(e), obj=task_ds, orig_exc=e)

            if action in C._ACTION_ALL_INCLUDE_IMPORT_TASKS:

                if use_handlers:
                    include_class = HandlerTaskInclude
                else:
                    include_class = TaskInclude

                t = include_class.load(
                    task_ds,
                    block=block,
                    role=role,
                    task_include=None,
                    variable_manager=variable_manager,
                    loader=loader
                )

                all_vars = variable_manager.get_vars(play=play, task=t)
                templar = Templar(loader=loader, variables=all_vars)

                # check to see if this include is dynamic or static:
                # 1. the user has set the 'static' option to false or true
                # 2. one of the appropriate config options was set
                if action in C._ACTION_INCLUDE_TASKS:
                    is_static = False
                elif action in C._ACTION_IMPORT_TASKS:
                    is_static = True
                else:
                    # bare legacy 'include': deprecated, and treated as static
                    # only when its path is non-templated, all parents are
                    # static, and there is no loop
                    include_link = get_versioned_doclink('user_guide/playbooks_reuse_includes.html')
                    display.deprecated('"include" is deprecated, use include_tasks/import_tasks instead. See %s for details' % include_link, "2.16")
                    is_static = not templar.is_template(t.args['_raw_params']) and t.all_parents_static() and not t.loop

                if is_static:
                    if t.loop is not None:
                        if action in C._ACTION_IMPORT_TASKS:
                            raise AnsibleParserError("You cannot use loops on 'import_tasks' statements. You should use 'include_tasks' instead.", obj=task_ds)
                        else:
                            raise AnsibleParserError("You cannot use 'static' on an include with a loop", obj=task_ds)

                    # we set a flag to indicate this include was static
                    t.statically_loaded = True

                    # handle relative includes by walking up the list of parent include
                    # tasks and checking the relative result to see if it exists
                    parent_include = block
                    cumulative_path = None

                    found = False
                    subdir = 'tasks'
                    if use_handlers:
                        subdir = 'handlers'
                    while parent_include is not None:
                        if not isinstance(parent_include, TaskInclude):
                            parent_include = parent_include._parent
                            continue
                        try:
                            parent_include_dir = os.path.dirname(templar.template(parent_include.args.get('_raw_params')))
                        except AnsibleUndefinedVariable as e:
                            if not parent_include.statically_loaded:
                                raise AnsibleParserError(
                                    "Error when evaluating variable in dynamic parent include path: %s. "
                                    "When using static imports, the parent dynamic include cannot utilize host facts "
                                    "or variables from inventory" % parent_include.args.get('_raw_params'),
                                    obj=task_ds,
                                    suppress_extended_error=True,
                                    orig_exc=e
                                )
                            raise
                        if cumulative_path is None:
                            cumulative_path = parent_include_dir
                        elif not os.path.isabs(cumulative_path):
                            cumulative_path = os.path.join(parent_include_dir, cumulative_path)
                        include_target = templar.template(t.args['_raw_params'])
                        if t._role:
                            new_basedir = os.path.join(t._role._role_path, subdir, cumulative_path)
                            include_file = loader.path_dwim_relative(new_basedir, subdir, include_target)
                        else:
                            include_file = loader.path_dwim_relative(loader.get_basedir(), cumulative_path, include_target)

                        if os.path.exists(include_file):
                            found = True
                            break
                        else:
                            parent_include = parent_include._parent

                    if not found:
                        # fall back to resolving the path relative to the role
                        # (or the loader's basedir when there is no role)
                        try:
                            include_target = templar.template(t.args['_raw_params'])
                        except AnsibleUndefinedVariable as e:
                            raise AnsibleParserError(
                                "Error when evaluating variable in import path: %s.\n\n"
                                "When using static imports, ensure that any variables used in their names are defined in vars/vars_files\n"
                                "or extra-vars passed in from the command line. Static imports cannot use variables from facts or inventory\n"
                                "sources like group or host vars." % t.args['_raw_params'],
                                obj=task_ds,
                                suppress_extended_error=True,
                                orig_exc=e)
                        if t._role:
                            include_file = loader.path_dwim_relative(t._role._role_path, subdir, include_target)
                        else:
                            include_file = loader.path_dwim(include_target)

                    data = loader.load_from_file(include_file)
                    if not data:
                        display.warning('file %s is empty and had no tasks to include' % include_file)
                        continue
                    elif not isinstance(data, list):
                        raise AnsibleParserError("included task files must contain a list of tasks", obj=data)

                    # since we can't send callbacks here, we display a message directly in
                    # the same fashion used by the on_include callback. We also do it here,
                    # because the recursive nature of helper methods means we may be loading
                    # nested includes, and we want the include order printed correctly
                    display.vv("statically imported: %s" % include_file)

                    ti_copy = t.copy(exclude_parent=True)
                    ti_copy._parent = block
                    included_blocks = load_list_of_blocks(
                        data,
                        play=play,
                        parent_block=None,
                        task_include=ti_copy,
                        role=role,
                        use_handlers=use_handlers,
                        loader=loader,
                        variable_manager=variable_manager,
                    )

                    tags = ti_copy.tags[:]

                    # now we extend the tags on each of the included blocks
                    for b in included_blocks:
                        b.tags = list(set(b.tags).union(tags))
                    # END FIXME

                    # FIXME: handlers shouldn't need this special handling, but do
                    # right now because they don't iterate blocks correctly
                    if use_handlers:
                        for b in included_blocks:
                            task_list.extend(b.block)
                    else:
                        task_list.extend(included_blocks)
                else:
                    t.is_static = False
                    task_list.append(t)

            elif action in C._ACTION_ALL_PROPER_INCLUDE_IMPORT_ROLES:
                if use_handlers:
                    raise AnsibleParserError(f"Using '{action}' as a handler is not supported.", obj=task_ds)

                ir = IncludeRole.load(
                    task_ds,
                    block=block,
                    role=role,
                    task_include=None,
                    variable_manager=variable_manager,
                    loader=loader,
                )

                # 1. the user has set the 'static' option to false or true
                # 2. one of the appropriate config options was set
                is_static = False
                if action in C._ACTION_IMPORT_ROLE:
                    is_static = True

                if is_static:
                    if ir.loop is not None:
                        if action in C._ACTION_IMPORT_ROLE:
                            raise AnsibleParserError("You cannot use loops on 'import_role' statements. You should use 'include_role' instead.", obj=task_ds)
                        else:
                            raise AnsibleParserError("You cannot use 'static' on an include_role with a loop", obj=task_ds)

                    # we set a flag to indicate this include was static
                    ir.statically_loaded = True

                    # template the role name now, if needed
                    all_vars = variable_manager.get_vars(play=play, task=ir)
                    templar = Templar(loader=loader, variables=all_vars)
                    ir._role_name = templar.template(ir._role_name)

                    # uses compiled list from object
                    blocks, _ = ir.get_block_list(variable_manager=variable_manager, loader=loader)
                    task_list.extend(blocks)
                else:
                    # passes task object itself for latter generation of list
                    task_list.append(ir)
            else:
                # plain task (or handler) — no include/import involved
                if use_handlers:
                    t = Handler.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
                else:
                    t = Task.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)

                task_list.append(t)

    return task_list
def load_list_of_roles(ds, play, current_role_path=None, variable_manager=None, loader=None, collection_search_list=None):
    """
    Load and return a list of RoleInclude objects from the ds list of role definitions.

    :param ds: list of roles to load
    :param play: calling Play object
    :param current_role_path: path of the owning role, if any
    :param variable_manager: varmgr to use for templating
    :param loader: loader to use for DS parsing/services
    :param collection_search_list: list of collections to search for unqualified role names
    :return: list of RoleInclude objects, one per role definition
    """
    # imported here to prevent a circular dependency with imports
    from ansible.playbook.role.include import RoleInclude

    if not isinstance(ds, list):
        raise AnsibleAssertionError('ds (%s) should be a list but was a %s' % (ds, type(ds)))

    return [
        RoleInclude.load(
            role_def,
            play=play,
            current_role_path=current_role_path,
            variable_manager=variable_manager,
            loader=loader,
            collection_list=collection_search_list,
        )
        for role_def in ds
    ]
class IncludedFile:
    """One include/import target (task file or role) plus the hosts that hit it.

    Instances are deduplicated by equality in process_include_results so that
    all hosts producing the same include (same file/args/vars/task) share one
    entry.
    """

    def __init__(self, filename, args, vars, task, is_role=False):
        self._filename = filename      # resolved include path, or role name when is_role
        self._args = args              # args passed to the include
        self._vars = vars              # loop/label special vars captured from the result
        self._task = task              # the originating (or copied) task object
        self._hosts = []               # hosts that produced this include
        self._is_role = is_role        # True when this represents include_role/import_role
        self._results = []             # raw task results that contributed to this include

    def add_host(self, host):
        """Record *host* for this include.

        :raises ValueError: if *host* was already added (callers use this to
            detect duplicates and advance to the next matching include).
        """
        if host not in self._hosts:
            self._hosts.append(host)
            return
        raise ValueError()

    def __eq__(self, other):
        # BUGFIX: previously comparing against a non-IncludedFile raised
        # AttributeError (e.g. list.index over a heterogeneous list); return
        # NotImplemented so Python falls back to identity comparison instead.
        if not isinstance(other, IncludedFile):
            return NotImplemented
        return (other._filename == self._filename and
                other._args == self._args and
                other._vars == self._vars and
                other._task._uuid == self._task._uuid and
                other._task._parent._uuid == self._task._parent._uuid)

    def __repr__(self):
        return "%s (args=%s vars=%s): %s" % (self._filename, self._args, self._vars, self._hosts)
    @staticmethod
    def process_include_results(results, iterator, loader, variable_manager):
        """Turn a batch of task results into a deduplicated list of IncludedFile.

        :param results: task result objects from the strategy layer
        :param iterator: play iterator (used here for iterator._play)
        :param loader: DataLoader for path resolution
        :param variable_manager: used to build per-host task vars (cached per
            (play, host, task) to avoid repeated expensive get_vars calls)
        :return: list of IncludedFile, each with its interested hosts attached
        """
        included_files = []
        task_vars_cache = {}

        for res in results:

            original_host = res._host
            original_task = res._task

            if original_task.action in C._ACTION_ALL_INCLUDES:
                if original_task.action in C._ACTION_INCLUDE:
                    display.deprecated('"include" is deprecated, use include_tasks/import_tasks/import_playbook instead', "2.16")

                # a looped include produces one sub-result per item
                if original_task.loop:
                    if 'results' not in res._result:
                        continue
                    include_results = res._result['results']
                else:
                    include_results = [res._result]

                for include_result in include_results:
                    # if the task result was skipped or failed, continue
                    if 'skipped' in include_result and include_result['skipped'] or 'failed' in include_result and include_result['failed']:
                        continue

                    cache_key = (iterator._play, original_host, original_task)
                    try:
                        task_vars = task_vars_cache[cache_key]
                    except KeyError:
                        task_vars = task_vars_cache[cache_key] = variable_manager.get_vars(play=iterator._play, host=original_host, task=original_task)

                    include_args = include_result.get('include_args', dict())
                    special_vars = {}
                    # carry loop bookkeeping vars from the result into the vars
                    # used to template the include path
                    loop_var = include_result.get('ansible_loop_var', 'item')
                    index_var = include_result.get('ansible_index_var')
                    if loop_var in include_result:
                        task_vars[loop_var] = special_vars[loop_var] = include_result[loop_var]
                    if index_var and index_var in include_result:
                        task_vars[index_var] = special_vars[index_var] = include_result[index_var]
                    if '_ansible_item_label' in include_result:
                        task_vars['_ansible_item_label'] = special_vars['_ansible_item_label'] = include_result['_ansible_item_label']
                    if 'ansible_loop' in include_result:
                        task_vars['ansible_loop'] = special_vars['ansible_loop'] = include_result['ansible_loop']
                    if original_task.no_log and '_ansible_no_log' not in include_args:
                        task_vars['_ansible_no_log'] = special_vars['_ansible_no_log'] = original_task.no_log

                    # get search path for this task to pass to lookup plugins that may be used in pathing to
                    # the included file
                    task_vars['ansible_search_path'] = original_task.get_search_path()

                    # ensure basedir is always in (dwim already searches here but we need to display it)
                    if loader.get_basedir() not in task_vars['ansible_search_path']:
                        task_vars['ansible_search_path'].append(loader.get_basedir())

                    templar = Templar(loader=loader, variables=task_vars)

                    if original_task.action in C._ACTION_ALL_INCLUDE_TASKS:
                        include_file = None

                        if original_task._parent:
                            # handle relative includes by walking up the list of parent include
                            # tasks and checking the relative result to see if it exists
                            parent_include = original_task._parent
                            cumulative_path = None
                            while parent_include is not None:
                                if not isinstance(parent_include, TaskInclude):
                                    parent_include = parent_include._parent
                                    continue
                                if isinstance(parent_include, IncludeRole):
                                    parent_include_dir = parent_include._role_path
                                else:
                                    try:
                                        parent_include_dir = os.path.dirname(templar.template(parent_include.args.get('_raw_params')))
                                    except AnsibleError as e:
                                        # best effort: keep walking with an empty dir
                                        parent_include_dir = ''
                                        display.warning(
                                            'Templating the path of the parent %s failed. The path to the '
                                            'included file may not be found. '
                                            'The error was: %s.' % (original_task.action, to_text(e))
                                        )
                                if cumulative_path is not None and not os.path.isabs(cumulative_path):
                                    cumulative_path = os.path.join(parent_include_dir, cumulative_path)
                                else:
                                    cumulative_path = parent_include_dir
                                include_target = templar.template(include_result['include'])
                                if original_task._role:
                                    new_basedir = os.path.join(original_task._role._role_path, 'tasks', cumulative_path)
                                    candidates = [loader.path_dwim_relative(original_task._role._role_path, 'tasks', include_target),
                                                  loader.path_dwim_relative(new_basedir, 'tasks', include_target)]
                                    for include_file in candidates:
                                        try:
                                            # may throw OSError
                                            os.stat(include_file)
                                            # or select the task file if it exists
                                            break
                                        except OSError:
                                            pass
                                else:
                                    include_file = loader.path_dwim_relative(loader.get_basedir(), cumulative_path, include_target)

                                if os.path.exists(include_file):
                                    break
                                else:
                                    parent_include = parent_include._parent

                        if include_file is None:
                            # no parent-relative match found; resolve against the
                            # role's tasks/handlers dir, or the playbook basedir
                            if original_task._role:
                                include_target = templar.template(include_result['include'])
                                include_file = loader.path_dwim_relative(
                                    original_task._role._role_path,
                                    'handlers' if isinstance(original_task, Handler) else 'tasks',
                                    include_target,
                                    is_role=True)
                            else:
                                include_file = loader.path_dwim(include_result['include'])

                        include_file = templar.template(include_file)
                        inc_file = IncludedFile(include_file, include_args, special_vars, original_task)
                    else:
                        # template the included role's name here
                        role_name = include_args.pop('name', include_args.pop('role', None))
                        if role_name is not None:
                            role_name = templar.template(role_name)

                        new_task = original_task.copy()
                        new_task._role_name = role_name
                        for from_arg in new_task.FROM_ARGS:
                            if from_arg in include_args:
                                from_key = from_arg.removesuffix('_from')
                                new_task._from_files[from_key] = templar.template(include_args.pop(from_arg))

                        omit_token = task_vars.get('omit')
                        if omit_token:
                            new_task._from_files = remove_omit(new_task._from_files, omit_token)

                        inc_file = IncludedFile(role_name, include_args, special_vars, new_task, is_role=True)

                    # deduplicate: reuse an equal existing entry unless this host
                    # is already attached to it, in which case scan past it for
                    # (or append) a fresh entry
                    idx = 0
                    orig_inc_file = inc_file
                    while 1:
                        try:
                            pos = included_files[idx:].index(orig_inc_file)
                            # pos is relative to idx since we are slicing
                            # use idx + pos due to relative indexing
                            inc_file = included_files[idx + pos]
                        except ValueError:
                            included_files.append(orig_inc_file)
                            inc_file = orig_inc_file

                        try:
                            inc_file.add_host(original_host)
                            inc_file._results.append(res)
                        except ValueError:
                            # The host already exists for this include, advance forward, this is a new include
                            idx += pos + 1
                        else:
                            break

        return included_files
class LoopControl(FieldAttributeBase):
    # Holds the options of a task's `loop_control` keyword; built via load()
    # from the parsed YAML mapping.

    # name of the per-item variable (default 'item')
    loop_var = FieldAttribute(isa='str', default='item', always_post_validate=True)
    # optional variable holding the current loop index
    index_var = FieldAttribute(isa='str', always_post_validate=True)
    # template used to label each item in output
    label = FieldAttribute(isa='str')
    # seconds to pause between loop iterations (default 0)
    pause = FieldAttribute(isa='float', default=0, always_post_validate=True)
    # enable the extended `ansible_loop` variable
    extended = FieldAttribute(isa='bool', always_post_validate=True)
    # whether extended loop info includes the full `allitems` list (default True)
    extended_allitems = FieldAttribute(isa='bool', default=True, always_post_validate=True)

    def __init__(self):
        super(LoopControl, self).__init__()

    @staticmethod
    def load(data, variable_manager=None, loader=None):
        # Build a LoopControl from its parsed YAML datastructure.
        t = LoopControl()
        return t.load_data(data, variable_manager=variable_manager, loader=loader)
+# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible import constants as C +from ansible import context +from ansible.errors import AnsibleParserError, AnsibleAssertionError +from ansible.module_utils._text import to_native +from ansible.module_utils.common.collections import is_sequence +from ansible.module_utils.six import binary_type, string_types, text_type +from ansible.playbook.attribute import FieldAttribute +from ansible.playbook.base import Base +from ansible.playbook.block import Block +from ansible.playbook.collectionsearch import CollectionSearch +from ansible.playbook.helpers import load_list_of_blocks, load_list_of_roles +from ansible.playbook.role import Role +from ansible.playbook.task import Task +from ansible.playbook.taggable import Taggable +from ansible.vars.manager import preprocess_vars +from ansible.utils.display import Display + +display = Display() + + +__all__ = ['Play'] + + +class Play(Base, Taggable, CollectionSearch): + + """ + A play is a language feature that represents a list of roles and/or + task/handler blocks to execute on a given set of hosts. + + Usage: + + Play.load(datastructure) -> Play + Play.something(...) 
+ """ + + # ================================================================================= + hosts = FieldAttribute(isa='list', required=True, listof=string_types, always_post_validate=True, priority=-2) + + # Facts + gather_facts = FieldAttribute(isa='bool', default=None, always_post_validate=True) + + # defaults to be deprecated, should be 'None' in future + gather_subset = FieldAttribute(isa='list', default=(lambda: C.DEFAULT_GATHER_SUBSET), listof=string_types, always_post_validate=True) + gather_timeout = FieldAttribute(isa='int', default=C.DEFAULT_GATHER_TIMEOUT, always_post_validate=True) + fact_path = FieldAttribute(isa='string', default=C.DEFAULT_FACT_PATH) + + # Variable Attributes + vars_files = FieldAttribute(isa='list', default=list, priority=99) + vars_prompt = FieldAttribute(isa='list', default=list, always_post_validate=False) + + # Role Attributes + roles = FieldAttribute(isa='list', default=list, priority=90) + + # Block (Task) Lists Attributes + handlers = FieldAttribute(isa='list', default=list, priority=-1) + pre_tasks = FieldAttribute(isa='list', default=list, priority=-1) + post_tasks = FieldAttribute(isa='list', default=list, priority=-1) + tasks = FieldAttribute(isa='list', default=list, priority=-1) + + # Flag/Setting Attributes + force_handlers = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('force_handlers'), always_post_validate=True) + max_fail_percentage = FieldAttribute(isa='percent', always_post_validate=True) + serial = FieldAttribute(isa='list', default=list, always_post_validate=True) + strategy = FieldAttribute(isa='string', default=C.DEFAULT_STRATEGY, always_post_validate=True) + order = FieldAttribute(isa='string', always_post_validate=True) + + # ================================================================================= + + def __init__(self): + super(Play, self).__init__() + + self._included_conditional = None + self._included_path = None + self._removed_hosts = [] + self.ROLE_CACHE = {} + + 
self.only_tags = set(context.CLIARGS.get('tags', [])) or frozenset(('all',)) + self.skip_tags = set(context.CLIARGS.get('skip_tags', [])) + + self._action_groups = {} + self._group_actions = {} + + def __repr__(self): + return self.get_name() + + def _validate_hosts(self, attribute, name, value): + # Only validate 'hosts' if a value was passed in to original data set. + if 'hosts' in self._ds: + if not value: + raise AnsibleParserError("Hosts list cannot be empty. Please check your playbook") + + if is_sequence(value): + # Make sure each item in the sequence is a valid string + for entry in value: + if entry is None: + raise AnsibleParserError("Hosts list cannot contain values of 'None'. Please check your playbook") + elif not isinstance(entry, (binary_type, text_type)): + raise AnsibleParserError("Hosts list contains an invalid host value: '{host!s}'".format(host=entry)) + + elif not isinstance(value, (binary_type, text_type)): + raise AnsibleParserError("Hosts list must be a sequence or string. Please check your playbook.") + + def get_name(self): + ''' return the name of the Play ''' + if self.name: + return self.name + + if is_sequence(self.hosts): + self.name = ','.join(self.hosts) + else: + self.name = self.hosts or '' + + return self.name + + @staticmethod + def load(data, variable_manager=None, loader=None, vars=None): + p = Play() + if vars: + p.vars = vars.copy() + return p.load_data(data, variable_manager=variable_manager, loader=loader) + + def preprocess_data(self, ds): + ''' + Adjusts play datastructure to cleanup old/legacy items + ''' + + if not isinstance(ds, dict): + raise AnsibleAssertionError('while preprocessing data (%s), ds should be a dict but was a %s' % (ds, type(ds))) + + # The use of 'user' in the Play datastructure was deprecated to + # line up with the same change for Tasks, due to the fact that + # 'user' conflicted with the user module. 
    def _load_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        try:
            return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
        except AssertionError as e:
            raise AnsibleParserError("A malformed block was encountered while loading tasks: %s" % to_native(e), obj=self._ds, orig_exc=e)

    def _load_pre_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        # NOTE(review): unlike _load_tasks, this message omits the underlying
        # exception text (to_native(e)) — likely an inconsistency; confirm
        # before unifying.
        try:
            return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
        except AssertionError as e:
            raise AnsibleParserError("A malformed block was encountered while loading pre_tasks", obj=self._ds, orig_exc=e)

    def _load_post_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        try:
            return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
        except AssertionError as e:
            raise AnsibleParserError("A malformed block was encountered while loading post_tasks", obj=self._ds, orig_exc=e)

    def _load_handlers(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed handlers/blocks.
        Bare handlers outside of a block are given an implicit block.
        '''
        try:
            # prepend newly loaded handler blocks ahead of any already present
            return self._extend_value(
                self.handlers,
                load_list_of_blocks(ds=ds, play=self, use_handlers=True, variable_manager=self._variable_manager, loader=self._loader),
                prepend=True
            )
        except AssertionError as e:
            raise AnsibleParserError("A malformed block was encountered while loading handlers", obj=self._ds, orig_exc=e)

    def _load_roles(self, attr, ds):
        '''
        Loads and returns a list of RoleInclude objects from the datastructure
        list of role definitions and creates the Role from those objects
        '''

        if ds is None:
            ds = []

        try:
            role_includes = load_list_of_roles(ds, play=self, variable_manager=self._variable_manager,
                                               loader=self._loader, collection_search_list=self.collections)
        except AssertionError as e:
            raise AnsibleParserError("A malformed role declaration was encountered.", obj=self._ds, orig_exc=e)

        roles = []
        for ri in role_includes:
            roles.append(Role.load(ri, play=self))

        # newly loaded roles go in front of any existing ones
        self.roles[:0] = roles

        return self.roles

    def _load_vars_prompt(self, attr, ds):
        # Normalize and validate the vars_prompt list; every entry must have a
        # 'name' and may only use the known prompt option keys.
        new_ds = preprocess_vars(ds)
        vars_prompts = []
        if new_ds is not None:
            for prompt_data in new_ds:
                if 'name' not in prompt_data:
                    raise AnsibleParserError("Invalid vars_prompt data structure, missing 'name' key", obj=ds)
                for key in prompt_data:
                    if key not in ('name', 'prompt', 'default', 'private', 'confirm', 'encrypt', 'salt_size', 'salt', 'unsafe'):
                        raise AnsibleParserError("Invalid vars_prompt data structure, found unsupported key '%s'" % key, obj=ds)
                vars_prompts.append(prompt_data)
        return vars_prompts
+ ''' + + block_list = [] + + if len(self.roles) > 0: + for r in self.roles: + # Don't insert tasks from ``import/include_role``, preventing + # duplicate execution at the wrong time + if r.from_include: + continue + block_list.extend(r.compile(play=self)) + + return block_list + + def compile_roles_handlers(self): + ''' + Handles the role handler compilation step, returning a flat list of Handlers + This is done for all roles in the Play. + ''' + + block_list = [] + + if len(self.roles) > 0: + for r in self.roles: + if r.from_include: + continue + block_list.extend(r.get_handler_blocks(play=self)) + + return block_list + + def compile(self): + ''' + Compiles and returns the task list for this play, compiled from the + roles (which are themselves compiled recursively) and/or the list of + tasks specified in the play. + ''' + + # create a block containing a single flush handlers meta + # task, so we can be sure to run handlers at certain points + # of the playbook execution + flush_block = Block.load( + data={'meta': 'flush_handlers'}, + play=self, + variable_manager=self._variable_manager, + loader=self._loader + ) + + for task in flush_block.block: + task.implicit = True + + block_list = [] + if self.force_handlers: + noop_task = Task() + noop_task.action = 'meta' + noop_task.args['_raw_params'] = 'noop' + noop_task.implicit = True + noop_task.set_loader(self._loader) + + b = Block(play=self) + b.block = self.pre_tasks or [noop_task] + b.always = [flush_block] + block_list.append(b) + + tasks = self._compile_roles() + self.tasks + b = Block(play=self) + b.block = tasks or [noop_task] + b.always = [flush_block] + block_list.append(b) + + b = Block(play=self) + b.block = self.post_tasks or [noop_task] + b.always = [flush_block] + block_list.append(b) + + return block_list + + block_list.extend(self.pre_tasks) + block_list.append(flush_block) + block_list.extend(self._compile_roles()) + block_list.extend(self.tasks) + block_list.append(flush_block) + 
block_list.extend(self.post_tasks) + block_list.append(flush_block) + + return block_list + + def get_vars(self): + return self.vars.copy() + + def get_vars_files(self): + if self.vars_files is None: + return [] + elif not isinstance(self.vars_files, list): + return [self.vars_files] + return self.vars_files + + def get_handlers(self): + return self.handlers[:] + + def get_roles(self): + return self.roles[:] + + def get_tasks(self): + tasklist = [] + for task in self.pre_tasks + self.tasks + self.post_tasks: + if isinstance(task, Block): + tasklist.append(task.block + task.rescue + task.always) + else: + tasklist.append(task) + return tasklist + + def serialize(self): + data = super(Play, self).serialize() + + roles = [] + for role in self.get_roles(): + roles.append(role.serialize()) + data['roles'] = roles + data['included_path'] = self._included_path + data['action_groups'] = self._action_groups + data['group_actions'] = self._group_actions + + return data + + def deserialize(self, data): + super(Play, self).deserialize(data) + + self._included_path = data.get('included_path', None) + self._action_groups = data.get('action_groups', {}) + self._group_actions = data.get('group_actions', {}) + if 'roles' in data: + role_data = data.get('roles', []) + roles = [] + for role in role_data: + r = Role() + r.deserialize(role) + roles.append(r) + + setattr(self, 'roles', roles) + del data['roles'] + + def copy(self): + new_me = super(Play, self).copy() + new_me.ROLE_CACHE = self.ROLE_CACHE.copy() + new_me._included_conditional = self._included_conditional + new_me._included_path = self._included_path + new_me._action_groups = self._action_groups + new_me._group_actions = self._group_actions + return new_me diff --git a/lib/ansible/playbook/play_context.py b/lib/ansible/playbook/play_context.py new file mode 100644 index 0000000..90de929 --- /dev/null +++ b/lib/ansible/playbook/play_context.py @@ -0,0 +1,354 @@ +# -*- coding: utf-8 -*- + +# (c) 2012-2014, Michael DeHaan 
<michael.dehaan@gmail.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible import constants as C +from ansible import context +from ansible.module_utils.compat.paramiko import paramiko +from ansible.playbook.attribute import FieldAttribute +from ansible.playbook.base import Base +from ansible.utils.display import Display +from ansible.utils.ssh_functions import check_for_controlpersist + + +display = Display() + + +__all__ = ['PlayContext'] + + +TASK_ATTRIBUTE_OVERRIDES = ( + 'become', + 'become_user', + 'become_pass', + 'become_method', + 'become_flags', + 'connection', + 'docker_extra_args', # TODO: remove + 'delegate_to', + 'no_log', + 'remote_user', +) + +RESET_VARS = ( + 'ansible_connection', + 'ansible_user', + 'ansible_host', + 'ansible_port', + + # TODO: ??? + 'ansible_docker_extra_args', + 'ansible_ssh_host', + 'ansible_ssh_pass', + 'ansible_ssh_port', + 'ansible_ssh_user', + 'ansible_ssh_private_key_file', + 'ansible_ssh_pipelining', + 'ansible_ssh_executable', +) + + +class PlayContext(Base): + + ''' + This class is used to consolidate the connection information for + hosts in a play and child tasks, where the task may override some + connection/authentication information. 
+ ''' + + # base + module_compression = FieldAttribute(isa='string', default=C.DEFAULT_MODULE_COMPRESSION) + shell = FieldAttribute(isa='string') + executable = FieldAttribute(isa='string', default=C.DEFAULT_EXECUTABLE) + + # connection fields, some are inherited from Base: + # (connection, port, remote_user, environment, no_log) + remote_addr = FieldAttribute(isa='string') + password = FieldAttribute(isa='string') + timeout = FieldAttribute(isa='int', default=C.DEFAULT_TIMEOUT) + connection_user = FieldAttribute(isa='string') + private_key_file = FieldAttribute(isa='string', default=C.DEFAULT_PRIVATE_KEY_FILE) + pipelining = FieldAttribute(isa='bool', default=C.ANSIBLE_PIPELINING) + + # networking modules + network_os = FieldAttribute(isa='string') + + # docker FIXME: remove these + docker_extra_args = FieldAttribute(isa='string') + + # ??? + connection_lockfd = FieldAttribute(isa='int') + + # privilege escalation fields + become = FieldAttribute(isa='bool') + become_method = FieldAttribute(isa='string') + become_user = FieldAttribute(isa='string') + become_pass = FieldAttribute(isa='string') + become_exe = FieldAttribute(isa='string', default=C.DEFAULT_BECOME_EXE) + become_flags = FieldAttribute(isa='string', default=C.DEFAULT_BECOME_FLAGS) + prompt = FieldAttribute(isa='string') + + # general flags + only_tags = FieldAttribute(isa='set', default=set) + skip_tags = FieldAttribute(isa='set', default=set) + + start_at_task = FieldAttribute(isa='string') + step = FieldAttribute(isa='bool', default=False) + + # "PlayContext.force_handlers should not be used, the calling code should be using play itself instead" + force_handlers = FieldAttribute(isa='bool', default=False) + + @property + def verbosity(self): + display.deprecated( + "PlayContext.verbosity is deprecated, use ansible.utils.display.Display.verbosity instead.", + version=2.18 + ) + return self._internal_verbosity + + @verbosity.setter + def verbosity(self, value): + display.deprecated( + 
"PlayContext.verbosity is deprecated, use ansible.utils.display.Display.verbosity instead.", + version=2.18 + ) + self._internal_verbosity = value + + def __init__(self, play=None, passwords=None, connection_lockfd=None): + # Note: play is really not optional. The only time it could be omitted is when we create + # a PlayContext just so we can invoke its deserialize method to load it from a serialized + # data source. + + super(PlayContext, self).__init__() + + if passwords is None: + passwords = {} + + self.password = passwords.get('conn_pass', '') + self.become_pass = passwords.get('become_pass', '') + + self._become_plugin = None + + self.prompt = '' + self.success_key = '' + + # a file descriptor to be used during locking operations + self.connection_lockfd = connection_lockfd + + # set options before play to allow play to override them + if context.CLIARGS: + self.set_attributes_from_cli() + else: + self._internal_verbosity = 0 + + if play: + self.set_attributes_from_play(play) + + def set_attributes_from_plugin(self, plugin): + # generic derived from connection plugin, temporary for backwards compat, in the end we should not set play_context properties + + # get options for plugins + options = C.config.get_configuration_definitions(plugin.plugin_type, plugin._load_name) + for option in options: + if option: + flag = options[option].get('name') + if flag: + setattr(self, flag, plugin.get_option(flag)) + + def set_attributes_from_play(self, play): + self.force_handlers = play.force_handlers + + def set_attributes_from_cli(self): + ''' + Configures this connection information instance with data from + options specified by the user on the command line. These have a + lower precedence than those set on the play or host. + ''' + if context.CLIARGS.get('timeout', False): + self.timeout = int(context.CLIARGS['timeout']) + + # From the command line. 
These should probably be used directly by plugins instead + # For now, they are likely to be moved to FieldAttribute defaults + self.private_key_file = context.CLIARGS.get('private_key_file') # Else default + self._internal_verbosity = context.CLIARGS.get('verbosity') # Else default + + # Not every cli that uses PlayContext has these command line args so have a default + self.start_at_task = context.CLIARGS.get('start_at_task', None) # Else default + + def set_task_and_variable_override(self, task, variables, templar): + ''' + Sets attributes from the task if they are set, which will override + those from the play. + + :arg task: the task object with the parameters that were set on it + :arg variables: variables from inventory + :arg templar: templar instance if templating variables is needed + ''' + + new_info = self.copy() + + # loop through a subset of attributes on the task object and set + # connection fields based on their values + for attr in TASK_ATTRIBUTE_OVERRIDES: + if (attr_val := getattr(task, attr, None)) is not None: + setattr(new_info, attr, attr_val) + + # next, use the MAGIC_VARIABLE_MAPPING dictionary to update this + # connection info object with 'magic' variables from the variable list. 
+ # If the value 'ansible_delegated_vars' is in the variables, it means + # we have a delegated-to host, so we check there first before looking + # at the variables in general + if task.delegate_to is not None: + # In the case of a loop, the delegated_to host may have been + # templated based on the loop variable, so we try and locate + # the host name in the delegated variable dictionary here + delegated_host_name = templar.template(task.delegate_to) + delegated_vars = variables.get('ansible_delegated_vars', dict()).get(delegated_host_name, dict()) + + delegated_transport = C.DEFAULT_TRANSPORT + for transport_var in C.MAGIC_VARIABLE_MAPPING.get('connection'): + if transport_var in delegated_vars: + delegated_transport = delegated_vars[transport_var] + break + + # make sure this delegated_to host has something set for its remote + # address, otherwise we default to connecting to it by name. This + # may happen when users put an IP entry into their inventory, or if + # they rely on DNS for a non-inventory hostname + for address_var in ('ansible_%s_host' % delegated_transport,) + C.MAGIC_VARIABLE_MAPPING.get('remote_addr'): + if address_var in delegated_vars: + break + else: + display.debug("no remote address found for delegated host %s\nusing its name, so success depends on DNS resolution" % delegated_host_name) + delegated_vars['ansible_host'] = delegated_host_name + + # reset the port back to the default if none was specified, to prevent + # the delegated host from inheriting the original host's setting + for port_var in ('ansible_%s_port' % delegated_transport,) + C.MAGIC_VARIABLE_MAPPING.get('port'): + if port_var in delegated_vars: + break + else: + if delegated_transport == 'winrm': + delegated_vars['ansible_port'] = 5986 + else: + delegated_vars['ansible_port'] = C.DEFAULT_REMOTE_PORT + + # and likewise for the remote user + for user_var in ('ansible_%s_user' % delegated_transport,) + C.MAGIC_VARIABLE_MAPPING.get('remote_user'): + if user_var in 
delegated_vars and delegated_vars[user_var]: + break + else: + delegated_vars['ansible_user'] = task.remote_user or self.remote_user + else: + delegated_vars = dict() + + # setup shell + for exe_var in C.MAGIC_VARIABLE_MAPPING.get('executable'): + if exe_var in variables: + setattr(new_info, 'executable', variables.get(exe_var)) + + attrs_considered = [] + for (attr, variable_names) in C.MAGIC_VARIABLE_MAPPING.items(): + for variable_name in variable_names: + if attr in attrs_considered: + continue + # if delegation task ONLY use delegated host vars, avoid delegated FOR host vars + if task.delegate_to is not None: + if isinstance(delegated_vars, dict) and variable_name in delegated_vars: + setattr(new_info, attr, delegated_vars[variable_name]) + attrs_considered.append(attr) + elif variable_name in variables: + setattr(new_info, attr, variables[variable_name]) + attrs_considered.append(attr) + # no else, as no other vars should be considered + + # become legacy updates -- from inventory file (inventory overrides + # commandline) + for become_pass_name in C.MAGIC_VARIABLE_MAPPING.get('become_pass'): + if become_pass_name in variables: + break + + # make sure we get port defaults if needed + if new_info.port is None and C.DEFAULT_REMOTE_PORT is not None: + new_info.port = int(C.DEFAULT_REMOTE_PORT) + + # special overrides for the connection setting + if len(delegated_vars) > 0: + # in the event that we were using local before make sure to reset the + # connection type to the default transport for the delegated-to host, + # if not otherwise specified + for connection_type in C.MAGIC_VARIABLE_MAPPING.get('connection'): + if connection_type in delegated_vars: + break + else: + remote_addr_local = new_info.remote_addr in C.LOCALHOST + inv_hostname_local = delegated_vars.get('inventory_hostname') in C.LOCALHOST + if remote_addr_local and inv_hostname_local: + setattr(new_info, 'connection', 'local') + elif getattr(new_info, 'connection', None) == 'local' and (not 
remote_addr_local or not inv_hostname_local): + setattr(new_info, 'connection', C.DEFAULT_TRANSPORT) + + # we store original in 'connection_user' for use of network/other modules that fallback to it as login user + # connection_user to be deprecated once connection=local is removed for, as local resets remote_user + if new_info.connection == 'local': + if not new_info.connection_user: + new_info.connection_user = new_info.remote_user + + # for case in which connection plugin still uses pc.remote_addr and in it's own options + # specifies 'default: inventory_hostname', but never added to vars: + if new_info.remote_addr == 'inventory_hostname': + new_info.remote_addr = variables.get('inventory_hostname') + display.warning('The "%s" connection plugin has an improperly configured remote target value, ' + 'forcing "inventory_hostname" templated value instead of the string' % new_info.connection) + + # set no_log to default if it was not previously set + if new_info.no_log is None: + new_info.no_log = C.DEFAULT_NO_LOG + + if task.check_mode is not None: + new_info.check_mode = task.check_mode + + if task.diff is not None: + new_info.diff = task.diff + + return new_info + + def set_become_plugin(self, plugin): + self._become_plugin = plugin + + def update_vars(self, variables): + ''' + Adds 'magic' variables relating to connections to the variable dictionary provided. + In case users need to access from the play, this is a legacy from runner. 
+ ''' + + for prop, var_list in C.MAGIC_VARIABLE_MAPPING.items(): + try: + if 'become' in prop: + continue + + var_val = getattr(self, prop) + for var_opt in var_list: + if var_opt not in variables and var_val is not None: + variables[var_opt] = var_val + except AttributeError: + continue diff --git a/lib/ansible/playbook/playbook_include.py b/lib/ansible/playbook/playbook_include.py new file mode 100644 index 0000000..03210ea --- /dev/null +++ b/lib/ansible/playbook/playbook_include.py @@ -0,0 +1,171 @@ +# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
+ +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os + +import ansible.constants as C +from ansible.errors import AnsibleParserError, AnsibleAssertionError +from ansible.module_utils._text import to_bytes +from ansible.module_utils.six import string_types +from ansible.parsing.splitter import split_args, parse_kv +from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping +from ansible.playbook.attribute import FieldAttribute +from ansible.playbook.base import Base +from ansible.playbook.conditional import Conditional +from ansible.playbook.taggable import Taggable +from ansible.utils.collection_loader import AnsibleCollectionConfig +from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path, _get_collection_playbook_path +from ansible.template import Templar +from ansible.utils.display import Display + +display = Display() + + +class PlaybookInclude(Base, Conditional, Taggable): + + import_playbook = FieldAttribute(isa='string') + vars_val = FieldAttribute(isa='dict', default=dict, alias='vars') + + @staticmethod + def load(data, basedir, variable_manager=None, loader=None): + return PlaybookInclude().load_data(ds=data, basedir=basedir, variable_manager=variable_manager, loader=loader) + + def load_data(self, ds, basedir, variable_manager=None, loader=None): + ''' + Overrides the base load_data(), as we're actually going to return a new + Playbook() object rather than a PlaybookInclude object + ''' + + # import here to avoid a dependency loop + from ansible.playbook import Playbook + from ansible.playbook.play import Play + + # first, we use the original parent method to correctly load the object + # via the load_data/preprocess_data system we normally use for other + # playbook objects + new_obj = super(PlaybookInclude, self).load_data(ds, variable_manager, loader) + + all_vars = self.vars.copy() + if variable_manager: + 
all_vars |= variable_manager.get_vars() + + templar = Templar(loader=loader, variables=all_vars) + + # then we use the object to load a Playbook + pb = Playbook(loader=loader) + + file_name = templar.template(new_obj.import_playbook) + + # check for FQCN + resource = _get_collection_playbook_path(file_name) + if resource is not None: + playbook = resource[1] + playbook_collection = resource[2] + else: + # not FQCN try path + playbook = file_name + if not os.path.isabs(playbook): + playbook = os.path.join(basedir, playbook) + + # might still be collection playbook + playbook_collection = _get_collection_name_from_path(playbook) + + if playbook_collection: + # it is a collection playbook, setup default collections + AnsibleCollectionConfig.default_collection = playbook_collection + else: + # it is NOT a collection playbook, setup adjecent paths + AnsibleCollectionConfig.playbook_paths.append(os.path.dirname(os.path.abspath(to_bytes(playbook, errors='surrogate_or_strict')))) + + pb._load_playbook_data(file_name=playbook, variable_manager=variable_manager, vars=self.vars.copy()) + + # finally, update each loaded playbook entry with any variables specified + # on the included playbook and/or any tags which may have been set + for entry in pb._entries: + + # conditional includes on a playbook need a marker to skip gathering + if new_obj.when and isinstance(entry, Play): + entry._included_conditional = new_obj.when[:] + + temp_vars = entry.vars | new_obj.vars + param_tags = temp_vars.pop('tags', None) + if param_tags is not None: + entry.tags.extend(param_tags.split(',')) + entry.vars = temp_vars + entry.tags = list(set(entry.tags).union(new_obj.tags)) + if entry._included_path is None: + entry._included_path = os.path.dirname(playbook) + + # Check to see if we need to forward the conditionals on to the included + # plays. 
If so, we can take a shortcut here and simply prepend them to + # those attached to each block (if any) + if new_obj.when: + for task_block in (entry.pre_tasks + entry.roles + entry.tasks + entry.post_tasks): + task_block._when = new_obj.when[:] + task_block.when[:] + + return pb + + def preprocess_data(self, ds): + ''' + Regorganizes the data for a PlaybookInclude datastructure to line + up with what we expect the proper attributes to be + ''' + + if not isinstance(ds, dict): + raise AnsibleAssertionError('ds (%s) should be a dict but was a %s' % (ds, type(ds))) + + # the new, cleaned datastructure, which will have legacy + # items reduced to a standard structure + new_ds = AnsibleMapping() + if isinstance(ds, AnsibleBaseYAMLObject): + new_ds.ansible_pos = ds.ansible_pos + + for (k, v) in ds.items(): + if k in C._ACTION_IMPORT_PLAYBOOK: + self._preprocess_import(ds, new_ds, k, v) + else: + # some basic error checking, to make sure vars are properly + # formatted and do not conflict with k=v parameters + if k == 'vars': + if 'vars' in new_ds: + raise AnsibleParserError("import_playbook parameters cannot be mixed with 'vars' entries for import statements", obj=ds) + elif not isinstance(v, dict): + raise AnsibleParserError("vars for import_playbook statements must be specified as a dictionary", obj=ds) + new_ds[k] = v + + return super(PlaybookInclude, self).preprocess_data(new_ds) + + def _preprocess_import(self, ds, new_ds, k, v): + ''' + Splits the playbook import line up into filename and parameters + ''' + if v is None: + raise AnsibleParserError("playbook import parameter is missing", obj=ds) + elif not isinstance(v, string_types): + raise AnsibleParserError("playbook import parameter must be a string indicating a file path, got %s instead" % type(v), obj=ds) + + # The import_playbook line must include at least one item, which is the filename + # to import. 
Anything after that should be regarded as a parameter to the import + items = split_args(v) + if len(items) == 0: + raise AnsibleParserError("import_playbook statements must specify the file name to import", obj=ds) + + new_ds['import_playbook'] = items[0].strip() diff --git a/lib/ansible/playbook/role/__init__.py b/lib/ansible/playbook/role/__init__.py new file mode 100644 index 0000000..0409609 --- /dev/null +++ b/lib/ansible/playbook/role/__init__.py @@ -0,0 +1,664 @@ +# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
+ +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os + +from collections.abc import Container, Mapping, Set, Sequence + +from ansible import constants as C +from ansible.errors import AnsibleError, AnsibleParserError, AnsibleAssertionError +from ansible.module_utils._text import to_text +from ansible.module_utils.six import binary_type, text_type +from ansible.playbook.attribute import FieldAttribute +from ansible.playbook.base import Base +from ansible.playbook.collectionsearch import CollectionSearch +from ansible.playbook.conditional import Conditional +from ansible.playbook.helpers import load_list_of_blocks +from ansible.playbook.role.metadata import RoleMetadata +from ansible.playbook.taggable import Taggable +from ansible.plugins.loader import add_all_plugin_dirs +from ansible.utils.collection_loader import AnsibleCollectionConfig +from ansible.utils.path import is_subpath +from ansible.utils.sentinel import Sentinel +from ansible.utils.vars import combine_vars + +__all__ = ['Role', 'hash_params'] + +# TODO: this should be a utility function, but can't be a member of +# the role due to the fact that it would require the use of self +# in a static method. This is also used in the base class for +# strategies (ansible/plugins/strategy/__init__.py) + + +def hash_params(params): + """ + Construct a data structure of parameters that is hashable. + + This requires changing any mutable data structures into immutable ones. + We chose a frozenset because role parameters have to be unique. + + .. warning:: this does not handle unhashable scalars. Two things + mitigate that limitation: + + 1) There shouldn't be any unhashable scalars specified in the yaml + 2) Our only choice would be to return an error anyway. 
+ """ + # Any container is unhashable if it contains unhashable items (for + # instance, tuple() is a Hashable subclass but if it contains a dict, it + # cannot be hashed) + if isinstance(params, Container) and not isinstance(params, (text_type, binary_type)): + if isinstance(params, Mapping): + try: + # Optimistically hope the contents are all hashable + new_params = frozenset(params.items()) + except TypeError: + new_params = set() + for k, v in params.items(): + # Hash each entry individually + new_params.add((k, hash_params(v))) + new_params = frozenset(new_params) + + elif isinstance(params, (Set, Sequence)): + try: + # Optimistically hope the contents are all hashable + new_params = frozenset(params) + except TypeError: + new_params = set() + for v in params: + # Hash each entry individually + new_params.add(hash_params(v)) + new_params = frozenset(new_params) + else: + # This is just a guess. + new_params = frozenset(params) + return new_params + + # Note: We do not handle unhashable scalars but our only choice would be + # to raise an error there anyway. 
    return frozenset((params,))


class Role(Base, Conditional, Taggable, CollectionSearch):

    delegate_to = FieldAttribute(isa='string')
    delegate_facts = FieldAttribute(isa='bool')

    def __init__(self, play=None, from_files=None, from_include=False, validate=True):
        # Role identity and on-disk location; filled in by _load_role_data()
        self._role_name = None
        self._role_path = None
        self._role_collection = None
        self._role_params = dict()
        self._loader = None

        self._metadata = None
        self._play = play
        self._parents = []
        self._dependencies = []
        self._task_blocks = []
        self._handler_blocks = []
        # cache for get_handler_blocks(); populated lazily on first call
        self._compiled_handler_blocks = None
        self._default_vars = dict()
        self._role_vars = dict()
        self._had_task_run = dict()
        self._completed = dict()
        # whether to prepend an argument-spec validation task (see _prepend_validation_task)
        self._should_validate = validate

        if from_files is None:
            from_files = {}
        # maps subdir name ('tasks', 'vars', ...) to an alternate entry-point file name
        self._from_files = from_files

        # Indicates whether this role was included via include/import_role
        self.from_include = from_include

        super(Role, self).__init__()

    def __repr__(self):
        return self.get_name()

    def get_name(self, include_role_fqcn=True):
        """Return the role name, by default fully qualified with its collection (if any)."""
        if include_role_fqcn:
            return '.'.join(x for x in (self._role_collection, self._role_name) if x)
        return self._role_name

    @staticmethod
    def load(role_include, play, parent_role=None, from_files=None, from_include=False, validate=True):
        """Create (or fetch from the play's role cache) a Role from a RoleInclude.

        :raises AnsibleError: when Python raises RuntimeError (recursion limit),
            which here indicates a dependency cycle between roles.
        """
        if from_files is None:
            from_files = {}
        try:
            # The ROLE_CACHE is a dictionary of role names, with each entry
            # containing another dictionary corresponding to a set of parameters
            # specified for a role as the key and the Role() object itself.
            # We use frozenset to make the dictionary hashable.

            params = role_include.get_role_params()
            if role_include.when is not None:
                params['when'] = role_include.when
            if role_include.tags is not None:
                params['tags'] = role_include.tags
            if from_files is not None:
                params['from_files'] = from_files
            if role_include.vars:
                params['vars'] = role_include.vars

            params['from_include'] = from_include

            hashed_params = hash_params(params)
            if role_include.get_name() in play.ROLE_CACHE:
                # same role with identical params already loaded for this play: reuse it
                for (entry, role_obj) in play.ROLE_CACHE[role_include.get_name()].items():
                    if hashed_params == entry:
                        if parent_role:
                            role_obj.add_parent(parent_role)
                        return role_obj

            # TODO: need to fix cycle detection in role load (maybe use an empty dict
            # for the in-flight in role cache as a sentinel that we're already trying to load
            # that role?)
            # see https://github.com/ansible/ansible/issues/61527
            r = Role(play=play, from_files=from_files, from_include=from_include, validate=validate)
            r._load_role_data(role_include, parent_role=parent_role)

            if role_include.get_name() not in play.ROLE_CACHE:
                play.ROLE_CACHE[role_include.get_name()] = dict()

            # FIXME: how to handle cache keys for collection-based roles, since they're technically adjustable per task?
            play.ROLE_CACHE[role_include.get_name()][hashed_params] = r
            return r

        except RuntimeError:
            raise AnsibleError("A recursion loop was detected with the roles specified. Make sure child roles do not have dependencies on parent roles",
                               obj=role_include._ds)

    def _load_role_data(self, role_include, parent_role=None):
        """Populate this Role from a RoleInclude: vars/defaults/meta/tasks/handlers files,
        field attributes, dependencies, and the collection search list."""
        self._role_name = role_include.role
        self._role_path = role_include.get_role_path()
        self._role_collection = role_include._role_collection
        self._role_params = role_include.get_role_params()
        self._variable_manager = role_include.get_variable_manager()
        self._loader = role_include.get_loader()

        if parent_role:
            self.add_parent(parent_role)

        # copy over all field attributes from the RoleInclude
        # update self._attr directly, to avoid squashing
        for attr_name in self.fattributes:
            setattr(self, f'_{attr_name}', getattr(role_include, f'_{attr_name}', Sentinel))

        # vars and default vars are regular dictionaries
        self._role_vars = self._load_role_yaml('vars', main=self._from_files.get('vars'), allow_dir=True)
        if self._role_vars is None:
            self._role_vars = {}
        elif not isinstance(self._role_vars, Mapping):
            raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

        self._default_vars = self._load_role_yaml('defaults', main=self._from_files.get('defaults'), allow_dir=True)
        if self._default_vars is None:
            self._default_vars = {}
        elif not isinstance(self._default_vars, Mapping):
            raise AnsibleParserError("The defaults/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

        # load the role's other files, if they exist
        metadata = self._load_role_yaml('meta')
        if metadata:
            self._metadata = RoleMetadata.load(metadata, owner=self, variable_manager=self._variable_manager, loader=self._loader)
            self._dependencies = self._load_dependencies()
        else:
            self._metadata = RoleMetadata()

        # reset collections list; roles do not inherit collections from parents, just use the defaults
        # FUTURE: use a private config default for this so we can allow it to be overridden later
        self.collections = []

        # configure plugin/collection loading; either prepend the current role's collection or configure legacy plugin loading
        # FIXME: need exception for explicit ansible.legacy?
        if self._role_collection:  # this is a collection-hosted role
            self.collections.insert(0, self._role_collection)
        else:  # this is a legacy role, but set the default collection if there is one
            default_collection = AnsibleCollectionConfig.default_collection
            if default_collection:
                self.collections.insert(0, default_collection)
            # legacy role, ensure all plugin dirs under the role are added to plugin search path
            add_all_plugin_dirs(self._role_path)

        # collections can be specified in metadata for legacy or collection-hosted roles
        if self._metadata.collections:
            self.collections.extend((c for c in self._metadata.collections if c not in self.collections))

        # if any collections were specified, ensure that core or legacy synthetic collections are always included
        if self.collections:
            # default append collection is core for collection-hosted roles, legacy for others
            default_append_collection = 'ansible.builtin' if self._role_collection else 'ansible.legacy'
            if 'ansible.builtin' not in self.collections and 'ansible.legacy' not in self.collections:
                self.collections.append(default_append_collection)

        task_data = self._load_role_yaml('tasks', main=self._from_files.get('tasks'))

        if self._should_validate:
            role_argspecs = self._get_role_argspecs()
            task_data = self._prepend_validation_task(task_data, role_argspecs)

        if task_data:
            try:
                self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader, variable_manager=self._variable_manager)
            except AssertionError as e:
                raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks" % self._role_name,
                                         obj=task_data, orig_exc=e)

        handler_data = self._load_role_yaml('handlers', main=self._from_files.get('handlers'))
        if handler_data:
            try:
                self._handler_blocks = load_list_of_blocks(handler_data, play=self._play, role=self, use_handlers=True, loader=self._loader,
                                                           variable_manager=self._variable_manager)
            except AssertionError as e:
                raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks" % self._role_name,
                                         obj=handler_data, orig_exc=e)

    def _get_role_argspecs(self):
        """Get the role argument spec data.

        Role arg specs can be in one of two files in the role meta subdir: argument_specs.yml
        or main.yml. The former has precedence over the latter. Data is not combined
        between the files.

        :returns: A dict of all data under the top-level ``argument_specs`` YAML key
            in the argument spec file. An empty dict is returned if there is no
            argspec data.
        """
        base_argspec_path = os.path.join(self._role_path, 'meta', 'argument_specs')

        for ext in C.YAML_FILENAME_EXTENSIONS:
            full_path = base_argspec_path + ext
            if self._loader.path_exists(full_path):
                # Note: _load_role_yaml() takes care of rebuilding the path.
                argument_specs = self._load_role_yaml('meta', main='argument_specs')
                try:
                    return argument_specs.get('argument_specs') or {}
                except AttributeError:
                    # file existed but did not parse to a mapping
                    return {}

        # We did not find the meta/argument_specs.[yml|yaml] file, so use the spec
        # dict from the role meta data, if it exists. Ansible 2.11 and later will
        # have the 'argument_specs' attribute, but earlier versions will not.
        return getattr(self._metadata, 'argument_specs', {})

    def _prepend_validation_task(self, task_data, argspecs):
        '''Insert a role validation task if we have a role argument spec.

        This method will prepend a validation task to the front of the role task
        list to perform argument spec validation before any other tasks, if an arg spec
        exists for the entry point. Entry point defaults to `main`.

        :param task_data: List of tasks loaded from the role.
        :param argspecs: The role argument spec data dict.

        :returns: The (possibly modified) task list.
        '''
        if argspecs:
            # Determine the role entry point so we can retrieve the correct argument spec.
            # This comes from the `tasks_from` value to include_role or import_role.
            entrypoint = self._from_files.get('tasks', 'main')
            entrypoint_arg_spec = argspecs.get(entrypoint)

            if entrypoint_arg_spec:
                validation_task = self._create_validation_task(entrypoint_arg_spec, entrypoint)

                # Prepend our validate_argument_spec action to happen before any tasks provided by the role.
                # 'any tasks' can and does include 0 or None tasks, in which cases we create a list of tasks and add our
                # validate_argument_spec task
                if not task_data:
                    task_data = []
                task_data.insert(0, validation_task)
        return task_data

    def _create_validation_task(self, argument_spec, entrypoint_name):
        '''Create a new task data structure that uses the validate_argument_spec action plugin.

        :param argument_spec: The arg spec definition for a particular role entry point.
            This will be the entire arg spec for the entry point as read from the input file.
        :param entrypoint_name: The name of the role entry point associated with the
            supplied `argument_spec`.
        '''

        # If the arg spec provides a short description, use it to flesh out the validation task name
        task_name = "Validating arguments against arg spec '%s'" % entrypoint_name
        if 'short_description' in argument_spec:
            task_name = task_name + ' - ' + argument_spec['short_description']

        return {
            'action': {
                'module': 'ansible.builtin.validate_argument_spec',
                # Pass only the 'options' portion of the arg spec to the module.
                'argument_spec': argument_spec.get('options', {}),
                'provided_arguments': self._role_params,
                'validate_args_context': {
                    'type': 'role',
                    'name': self._role_name,
                    'argument_spec_name': entrypoint_name,
                    'path': self._role_path
                },
            },
            'name': task_name,
            'tags': ['always'],
        }

    def _load_role_yaml(self, subdir, main=None, allow_dir=False):
        '''
        Find and load role YAML files and return data found.
        :param subdir: subdir of role to search (vars, files, tasks, handlers, defaults)
        :type subdir: string
        :param main: filename to match, will default to 'main.<ext>' if not provided.
        :type main: string
        :param allow_dir: If true we combine results of multiple matching files found.
                          If false, highlander rules. Only for vars(dicts) and not tasks(lists).
        :type allow_dir: bool

        :returns: data from the matched file(s), type can be dict or list depending on vars or tasks.
        '''
        data = None
        file_path = os.path.join(self._role_path, subdir)
        if self._loader.path_exists(file_path) and self._loader.is_directory(file_path):
            # Valid extensions and ordering for roles is hard-coded to maintain portability
            extensions = ['.yml', '.yaml', '.json']  # same as default for YAML_FILENAME_EXTENSIONS

            # look for files w/o extensions before/after bare name depending on it being set or not
            # keep 'main' as original to figure out errors if no files found
            if main is None:
                _main = 'main'
                extensions.append('')
            else:
                _main = main
                extensions.insert(0, '')

            # not really 'find_vars_files' but find_files_with_extensions_default_to_yaml_filename_extensions
            found_files = self._loader.find_vars_files(file_path, _main, extensions, allow_dir)
            if found_files:
                for found in found_files:

                    if not is_subpath(found, file_path):
                        raise AnsibleParserError("Failed loading '%s' for role (%s) as it is not inside the expected role path: '%s'" %
                                                 (to_text(found), self._role_name, to_text(file_path)))

                    new_data = self._loader.load_from_file(found)
                    if new_data:
                        if data is not None and isinstance(new_data, Mapping):
                            data = combine_vars(data, new_data)
                        else:
                            data = new_data

                        # found data so no need to continue unless we want to merge
                        if not allow_dir:
                            break

            elif main is not None:
                # this won't trigger with default only when <subdir>_from is specified
                raise AnsibleParserError("Could not find specified file in role: %s/%s" % (subdir, main))

        return data

    def _load_dependencies(self):
        '''
        Recursively loads role dependencies from the metadata list of
        dependencies, if it exists
        '''

        deps = []
        if self._metadata:
            for role_include in self._metadata.dependencies:
                r = Role.load(role_include, play=self._play, parent_role=self)
                deps.append(r)

        return deps

    # other functions

    def add_parent(self, parent_role):
        ''' adds a role to the list of this roles parents '''
        if not isinstance(parent_role, Role):
            raise AnsibleAssertionError()

        if parent_role not in self._parents:
            self._parents.append(parent_role)

    def get_parents(self):
        return self._parents

    def get_default_vars(self, dep_chain=None):
        """Merge defaults from all dependencies, then dep_chain parents, then this role (last wins)."""
        dep_chain = [] if dep_chain is None else dep_chain

        default_vars = dict()
        for dep in self.get_all_dependencies():
            default_vars = combine_vars(default_vars, dep.get_default_vars())
        if dep_chain:
            for parent in dep_chain:
                default_vars = combine_vars(default_vars, parent._default_vars)
        default_vars = combine_vars(default_vars, self._default_vars)
        return default_vars

    def get_inherited_vars(self, dep_chain=None):
        """Merge vars and role_vars from each parent in dep_chain, in order."""
        dep_chain = [] if dep_chain is None else dep_chain

        inherited_vars = dict()

        if dep_chain:
            for parent in dep_chain:
                inherited_vars = combine_vars(inherited_vars, parent.vars)
                inherited_vars = combine_vars(inherited_vars, parent._role_vars)
        return inherited_vars

    def get_role_params(self, dep_chain=None):
        """Merge role params from dep_chain parents, then this role's own params (last wins)."""
        dep_chain = [] if dep_chain is None else dep_chain

        params = {}
        if dep_chain:
            for parent in dep_chain:
                params = combine_vars(params, parent._role_params)
        params = combine_vars(params, self._role_params)
        return params

    def get_vars(self, dep_chain=None, include_params=True):
        """Full variable view for this role: inherited vars, dependency vars, own vars/role_vars,
        and (optionally) role params, merged in that order."""
        dep_chain = [] if dep_chain is None else dep_chain

        all_vars = self.get_inherited_vars(dep_chain)

        for dep in self.get_all_dependencies():
            all_vars = combine_vars(all_vars, dep.get_vars(include_params=include_params))

        all_vars = combine_vars(all_vars, self.vars)
        all_vars = combine_vars(all_vars, self._role_vars)
        if include_params:
            all_vars = combine_vars(all_vars, self.get_role_params(dep_chain=dep_chain))

        return all_vars

    def get_direct_dependencies(self):
        return self._dependencies[:]

    def get_all_dependencies(self):
        '''
        Returns a list of all deps, built recursively from all child dependencies,
        in the proper order in which they should be executed or evaluated.
        '''

        child_deps = []

        for dep in self.get_direct_dependencies():
            for child_dep in dep.get_all_dependencies():
                child_deps.append(child_dep)
            child_deps.append(dep)

        return child_deps

    def get_task_blocks(self):
        return self._task_blocks[:]

    def get_handler_blocks(self, play, dep_chain=None):
        # Do not recreate this list each time ``get_handler_blocks`` is called.
        # Cache the results so that we don't potentially overwrite with copied duplicates
        #
        # ``get_handler_blocks`` may be called when handling ``import_role`` during parsing
        # as well as with ``Play.compile_roles_handlers`` from ``TaskExecutor``
        if self._compiled_handler_blocks:
            return self._compiled_handler_blocks

        self._compiled_handler_blocks = block_list = []

        # update the dependency chain here
        if dep_chain is None:
            dep_chain = []
        new_dep_chain = dep_chain + [self]

        for dep in self.get_direct_dependencies():
            dep_blocks = dep.get_handler_blocks(play=play, dep_chain=new_dep_chain)
            block_list.extend(dep_blocks)

        for task_block in self._handler_blocks:
            new_task_block = task_block.copy()
            new_task_block._dep_chain = new_dep_chain
            new_task_block._play = play
            block_list.append(new_task_block)

        return block_list

    def has_run(self, host):
        '''
        Returns true if this role has been iterated over completely and
        at least one task was run
        '''

        return host.name in self._completed and not self._metadata.allow_duplicates

    def compile(self, play, dep_chain=None):
        '''
        Returns the task list for this role, which is created by first
        recursively compiling the tasks for all direct dependencies, and
        then adding on the tasks for this role.

        The role compile() also remembers and saves the dependency chain
        with each task, so tasks know by which route they were found, and
        can correctly take their parent's tags/conditionals into account.
        '''
        from ansible.playbook.block import Block
        from ansible.playbook.task import Task

        block_list = []

        # update the dependency chain here
        if dep_chain is None:
            dep_chain = []
        new_dep_chain = dep_chain + [self]

        deps = self.get_direct_dependencies()
        for dep in deps:
            dep_blocks = dep.compile(play=play, dep_chain=new_dep_chain)
            block_list.extend(dep_blocks)

        for task_block in self._task_blocks:
            new_task_block = task_block.copy()
            new_task_block._dep_chain = new_dep_chain
            new_task_block._play = play
            block_list.append(new_task_block)

        # append an implicit 'meta: role_complete' task so the engine can tell
        # when this role's task stream has finished
        eor_block = Block(play=play)
        eor_block._loader = self._loader
        eor_block._role = self
        eor_block._variable_manager = self._variable_manager
        eor_block.run_once = False

        eor_task = Task(block=eor_block)
        eor_task._role = self
        eor_task.action = 'meta'
        eor_task.args = {'_raw_params': 'role_complete'}
        eor_task.implicit = True
        eor_task.tags = ['always']
        eor_task.when = True

        eor_block.block = [eor_task]
        block_list.append(eor_block)

        return block_list

    def serialize(self, include_deps=True):
        """Serialize this role (and optionally its dependency/parent trees) to a plain dict."""
        res = super(Role, self).serialize()

        res['_role_name'] = self._role_name
        res['_role_path'] = self._role_path
        res['_role_vars'] = self._role_vars
        res['_role_params'] = self._role_params
        res['_default_vars'] = self._default_vars
        res['_had_task_run'] = self._had_task_run.copy()
        res['_completed'] = self._completed.copy()

        if self._metadata:
            res['_metadata'] = self._metadata.serialize()

        if include_deps:
            deps = []
            for role in self.get_direct_dependencies():
                deps.append(role.serialize())
            res['_dependencies'] = deps

            # parents are serialized without their own deps to avoid unbounded recursion
            parents = []
            for parent in self._parents:
                parents.append(parent.serialize(include_deps=False))
            res['_parents'] = parents

        return res

    def deserialize(self, data, include_deps=True):
        """Restore role state from a dict produced by serialize()."""
        self._role_name = data.get('_role_name', '')
        self._role_path = data.get('_role_path', '')
        self._role_vars = data.get('_role_vars', dict())
        self._role_params = data.get('_role_params', dict())
        self._default_vars = data.get('_default_vars', dict())
        self._had_task_run = data.get('_had_task_run', dict())
        self._completed = data.get('_completed', dict())

        if include_deps:
            deps = []
            for dep in data.get('_dependencies', []):
                r = Role()
                r.deserialize(dep)
                deps.append(r)
            setattr(self, '_dependencies', deps)

        parent_data = data.get('_parents', [])
        parents = []
        for parent in parent_data:
            r = Role()
            r.deserialize(parent, include_deps=False)
            parents.append(r)
        setattr(self, '_parents', parents)

        metadata_data = data.get('_metadata')
        if metadata_data:
            m = RoleMetadata()
            m.deserialize(metadata_data)
            self._metadata = m

        super(Role, self).deserialize(data)

    def set_loader(self, loader):
        """Propagate a (new) DataLoader to this role, its parents and dependencies."""
        self._loader = loader
        for parent in self._parents:
            parent.set_loader(loader)
        for dep in self.get_direct_dependencies():
            dep.set_loader(loader)
diff --git a/lib/ansible/playbook/role/definition.py b/lib/ansible/playbook/role/definition.py
new file mode 100644
index 0000000..b27a231
--- /dev/null
+++ b/lib/ansible/playbook/role/definition.py
@@ -0,0 +1,240 @@
# (c) 2014 Michael DeHaan, <michael@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os

from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleAssertionError
from ansible.module_utils.six import string_types
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.conditional import Conditional
from ansible.playbook.taggable import Taggable
from ansible.template import Templar
from ansible.utils.collection_loader import AnsibleCollectionRef
from ansible.utils.collection_loader._collection_finder import _get_collection_role_path
from ansible.utils.path import unfrackpath
from ansible.utils.display import Display

__all__ = ['RoleDefinition']

display = Display()


class RoleDefinition(Base, Conditional, Taggable, CollectionSearch):
    """Parses a role entry from playbook data: resolves the role name to an
    on-disk (or collection) path and splits role params from field attributes."""

    role = FieldAttribute(isa='string')

    def __init__(self, play=None, role_basedir=None, variable_manager=None, loader=None, collection_list=None):

        super(RoleDefinition, self).__init__()

        self._play = play
        self._variable_manager = variable_manager
        self._loader = loader

        # resolved on-disk path and owning collection, set by preprocess_data()
        self._role_path = None
        self._role_collection = None
        self._role_basedir = role_basedir
        self._role_params = dict()
        self._collection_list = collection_list

    # def __repr__(self):
    #     return 'ROLEDEF: ' + self._attributes.get('role', '<no name set>')

    @staticmethod
    def load(data, variable_manager=None, loader=None):
        # RoleDefinition is not loaded directly; subclasses (e.g. RoleInclude) implement load()
        raise AnsibleError("not implemented")

    def preprocess_data(self, ds):
        """Normalize the raw datastructure: resolve the role name/path and
        separate role params from recognized field attributes.

        :returns: a new AnsibleMapping with only field attributes plus 'role'.
        """
        # role names that are simply numbers can be parsed by PyYAML
        # as integers even when quoted, so turn it into a string type
        if isinstance(ds, int):
            ds = "%s" % ds

        if not isinstance(ds, dict) and not isinstance(ds, string_types) and not isinstance(ds, AnsibleBaseYAMLObject):
            raise AnsibleAssertionError()

        if isinstance(ds, dict):
            ds = super(RoleDefinition, self).preprocess_data(ds)

        # save the original ds for use later
        self._ds = ds

        # we create a new data structure here, using the same
        # object used internally by the YAML parsing code so we
        # can preserve file:line:column information if it exists
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            new_ds.ansible_pos = ds.ansible_pos

        # first we pull the role name out of the data structure,
        # and then use that to determine the role path (which may
        # result in a new role name, if it was a file path)
        role_name = self._load_role_name(ds)
        (role_name, role_path) = self._load_role_path(role_name)

        # next, we split the role params out from the valid role
        # attributes and update the new datastructure with that
        # result and the role name
        if isinstance(ds, dict):
            (new_role_def, role_params) = self._split_role_params(ds)
            new_ds |= new_role_def
            self._role_params = role_params

        # set the role name in the new ds
        new_ds['role'] = role_name

        # we store the role path internally
        self._role_path = role_path

        # and return the cleaned-up data structure
        return new_ds

    def _load_role_name(self, ds):
        '''
        Returns the role name (either the role: or name: field) from
        the role definition, or (when the role definition is a simple
        string), just that string
        '''

        if isinstance(ds, string_types):
            return ds

        role_name = ds.get('role', ds.get('name'))
        if not role_name or not isinstance(role_name, string_types):
            raise AnsibleError('role definitions must contain a role name', obj=ds)

        # if we have the required datastructures, and if the role_name
        # contains a variable, try and template it now
        if self._variable_manager:
            all_vars = self._variable_manager.get_vars(play=self._play)
            templar = Templar(loader=self._loader, variables=all_vars)
            role_name = templar.template(role_name)

        return role_name

    def _load_role_path(self, role_name):
        '''
        the 'role', as specified in the ds (or as a bare string), can either
        be a simple name or a full path. If it is a full path, we use the
        basename as the role name, otherwise we take the name as-given and
        append it to the default role path
        '''

        # create a templar class to template the dependency names, in
        # case they contain variables
        if self._variable_manager is not None:
            all_vars = self._variable_manager.get_vars(play=self._play)
        else:
            all_vars = dict()

        templar = Templar(loader=self._loader, variables=all_vars)
        role_name = templar.template(role_name)

        role_tuple = None

        # try to load as a collection-based role first
        if self._collection_list or AnsibleCollectionRef.is_valid_fqcr(role_name):
            role_tuple = _get_collection_role_path(role_name, self._collection_list)

        if role_tuple:
            # we found it, stash collection data and return the name/path tuple
            self._role_collection = role_tuple[2]
            return role_tuple[0:2]

        # We didn't find a collection role, look in defined role paths
        # FUTURE: refactor this to be callable from internal so we can properly order
        # ansible.legacy searches with the collections keyword

        # we always start the search for roles in the base directory of the playbook
        role_search_paths = [
            os.path.join(self._loader.get_basedir(), u'roles'),
        ]

        # also search in the configured roles path
        if C.DEFAULT_ROLES_PATH:
            role_search_paths.extend(C.DEFAULT_ROLES_PATH)

        # next, append the roles basedir, if it was set, so we can
        # search relative to that directory for dependent roles
        if self._role_basedir:
            role_search_paths.append(self._role_basedir)

        # finally as a last resort we look in the current basedir as set
        # in the loader (which should be the playbook dir itself) but without
        # the roles/ dir appended
        role_search_paths.append(self._loader.get_basedir())

        # now iterate through the possible paths and return the first one we find
        for path in role_search_paths:
            path = templar.template(path)
            role_path = unfrackpath(os.path.join(path, role_name))
            if self._loader.path_exists(role_path):
                return (role_name, role_path)

        # if not found elsewhere try to extract path from name
        role_path = unfrackpath(role_name)
        if self._loader.path_exists(role_path):
            role_name = os.path.basename(role_name)
            return (role_name, role_path)

        searches = (self._collection_list or []) + role_search_paths
        raise AnsibleError("the role '%s' was not found in %s" % (role_name, ":".join(searches)), obj=self._ds)

    def _split_role_params(self, ds):
        '''
        Splits any random role params off from the role spec and store
        them in a dictionary of params for parsing later
        '''

        role_def = dict()
        role_params = dict()
        base_attribute_names = frozenset(self.fattributes)
        for (key, value) in ds.items():
            # use the list of FieldAttribute values to determine what is and is not
            # an extra parameter for this role (or sub-class of this role)
            # FIXME: hard-coded list of exception key names here corresponds to the
            #        connection fields in the Base class. There may need to be some
            #        other mechanism where we exclude certain kinds of field attributes,
            #        or make this list more automatic in some way so we don't have to
            #        remember to update it manually.
            if key not in base_attribute_names:
                # this key does not match a field attribute, so it must be a role param
                role_params[key] = value
            else:
                # this is a field attribute, so copy it over directly
                role_def[key] = value

        return (role_def, role_params)

    def get_role_params(self):
        return self._role_params.copy()

    def get_role_path(self):
        return self._role_path

    def get_name(self, include_role_fqcn=True):
        """Return the role name, optionally qualified with its collection."""
        if include_role_fqcn:
            return '.'.join(x for x in (self._role_collection, self.role) if x)
        return self.role
diff --git a/lib/ansible/playbook/role/include.py b/lib/ansible/playbook/role/include.py
new file mode 100644
index 0000000..e0d4b67
--- /dev/null
+++ b/lib/ansible/playbook/role/include.py
@@ -0,0 +1,57 @@
# (c) 2014 Michael DeHaan, <michael@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+ +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible.errors import AnsibleError, AnsibleParserError +from ansible.module_utils.six import string_types +from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject +from ansible.playbook.attribute import FieldAttribute +from ansible.playbook.role.definition import RoleDefinition +from ansible.module_utils._text import to_native + + +__all__ = ['RoleInclude'] + + +class RoleInclude(RoleDefinition): + + """ + A derivative of RoleDefinition, used by playbook code when a role + is included for execution in a play. + """ + + delegate_to = FieldAttribute(isa='string') + delegate_facts = FieldAttribute(isa='bool', default=False) + + def __init__(self, play=None, role_basedir=None, variable_manager=None, loader=None, collection_list=None): + super(RoleInclude, self).__init__(play=play, role_basedir=role_basedir, variable_manager=variable_manager, + loader=loader, collection_list=collection_list) + + @staticmethod + def load(data, play, current_role_path=None, parent_role=None, variable_manager=None, loader=None, collection_list=None): + + if not (isinstance(data, string_types) or isinstance(data, dict) or isinstance(data, AnsibleBaseYAMLObject)): + raise AnsibleParserError("Invalid role definition: %s" % to_native(data)) + + if isinstance(data, string_types) and ',' in data: + raise AnsibleError("Invalid old style role requirement: %s" % data) + + ri = RoleInclude(play=play, role_basedir=current_role_path, variable_manager=variable_manager, loader=loader, collection_list=collection_list) + return ri.load_data(data, variable_manager=variable_manager, loader=loader) diff --git a/lib/ansible/playbook/role/metadata.py b/lib/ansible/playbook/role/metadata.py new file mode 100644 index 0000000..275ee54 --- /dev/null +++ b/lib/ansible/playbook/role/metadata.py @@ -0,0 +1,130 @@ +# (c) 2014 Michael DeHaan, <michael@ansible.com> +# +# This file 
# is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os

from ansible.errors import AnsibleParserError, AnsibleError
from ansible.module_utils._text import to_native
from ansible.module_utils.six import string_types
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.helpers import load_list_of_roles
from ansible.playbook.role.requirement import RoleRequirement

__all__ = ['RoleMetadata']


class RoleMetadata(Base, CollectionSearch):
    '''
    This class wraps the parsing and validation of the optional metadata
    within each Role (meta/main.yml).
    '''

    allow_duplicates = FieldAttribute(isa='bool', default=False)
    dependencies = FieldAttribute(isa='list', default=list)
    galaxy_info = FieldAttribute(isa='GalaxyInfo')
    argument_specs = FieldAttribute(isa='dict', default=dict)

    def __init__(self, owner=None):
        # the Role object this metadata belongs to; used for dependency path/collection resolution
        self._owner = owner
        super(RoleMetadata, self).__init__()

    @staticmethod
    def load(data, owner, variable_manager=None, loader=None):
        '''
        Returns a new RoleMetadata object based on the datastructure passed in.
        '''

        if not isinstance(data, dict):
            raise AnsibleParserError("the 'meta/main.yml' for role %s is not a dictionary" % owner.get_name())

        m = RoleMetadata(owner=owner).load_data(data, variable_manager=variable_manager, loader=loader)
        return m

    def _load_dependencies(self, attr, ds):
        '''
        This is a helper loading function for the dependencies list,
        which returns a list of RoleInclude objects
        '''

        roles = []
        if ds:
            if not isinstance(ds, list):
                raise AnsibleParserError("Expected role dependencies to be a list.", obj=self._ds)

            for role_def in ds:
                # FIXME: consolidate with ansible-galaxy to keep this in sync
                if isinstance(role_def, string_types) or 'role' in role_def or 'name' in role_def:
                    roles.append(role_def)
                    continue
                try:
                    # role_def is new style: { src: 'galaxy.role,version,name', other_vars: "here" }
                    def_parsed = RoleRequirement.role_yaml_parse(role_def)
                    if def_parsed.get('name'):
                        role_def['name'] = def_parsed['name']
                    roles.append(role_def)
                except AnsibleError as exc:
                    raise AnsibleParserError(to_native(exc), obj=role_def, orig_exc=exc)

        current_role_path = None
        collection_search_list = None

        if self._owner:
            current_role_path = os.path.dirname(self._owner._role_path)

            # if the calling role has a collections search path defined, consult it
            collection_search_list = self._owner.collections[:] or []

            # if the calling role is a collection role, ensure that its containing collection is searched first
            owner_collection = self._owner._role_collection
            if owner_collection:
                collection_search_list = [c for c in collection_search_list if c != owner_collection]
                collection_search_list.insert(0, owner_collection)
            # ensure fallback role search works
            if 'ansible.legacy' not in collection_search_list:
                collection_search_list.append('ansible.legacy')

        try:
            return load_list_of_roles(roles, play=self._owner._play, current_role_path=current_role_path,
                                      variable_manager=self._variable_manager, loader=self._loader,
                                      collection_search_list=collection_search_list)
        except AssertionError as e:
            raise AnsibleParserError("A malformed list of role dependencies was encountered.", obj=self._ds, orig_exc=e)

    def _load_galaxy_info(self, attr, ds):
        '''
        This is a helper loading function for the galaxy info entry
        in the metadata, which returns a GalaxyInfo object rather than
        a simple dictionary.
        '''
        # NOTE(review): currently returns the raw datastructure rather than a
        # GalaxyInfo object, despite the docstring.
        return ds

    def serialize(self):
        # NOTE(review): only allow_duplicates and dependencies survive a
        # serialize/deserialize round trip; galaxy_info and argument_specs do not.
        return dict(
            allow_duplicates=self._allow_duplicates,
            dependencies=self._dependencies
        )

    def deserialize(self, data):
        setattr(self, 'allow_duplicates', data.get('allow_duplicates', False))
        setattr(self, 'dependencies', data.get('dependencies', []))
diff --git a/lib/ansible/playbook/role/requirement.py b/lib/ansible/playbook/role/requirement.py
new file mode 100644
index 0000000..59e9cf3
--- /dev/null
+++ b/lib/ansible/playbook/role/requirement.py
@@ -0,0 +1,128 @@
# (c) 2014 Michael DeHaan, <michael@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Keys that are allowed to remain in a parsed role spec; everything else is
# stripped by role_yaml_parse().
VALID_SPEC_KEYS = [
    'name',
    'role',
    'scm',
    'src',
    'version',
]

display = Display()


class RoleRequirement(RoleDefinition):

    """
    Helper class for Galaxy, which is used to parse both dependencies
    specified in meta/main.yml and requirements.yml files.
    """

    def __init__(self):
        # Intentionally does not call super().__init__(): this class is used
        # only as a namespace for the static parsing helpers below.
        pass

    @staticmethod
    def repo_url_to_role_name(repo_url):
        # gets the role name out of a repo like
        # http://git.example.com/repos/repo.git" => "repo"

        if '://' not in repo_url and '@' not in repo_url:
            # not a URL; assume it is already a role name
            return repo_url
        trailing_path = repo_url.split('/')[-1]
        if trailing_path.endswith('.git'):
            trailing_path = trailing_path[:-4]
        if trailing_path.endswith('.tar.gz'):
            trailing_path = trailing_path[:-7]
        if ',' in trailing_path:
            # old-style 'src,version[,name]' spec: keep only the src part
            trailing_path = trailing_path.split(',')[0]
        return trailing_path

    @staticmethod
    def role_yaml_parse(role):
        # Normalize a role requirement (string or dict form) into a dict with
        # the keys listed in VALID_SPEC_KEYS.
        # NOTE(review): when the input dict contains a 'role' key, that dict is
        # mutated in place (the 'role' key is deleted); the copy() only happens
        # in the else branch — confirm callers do not rely on the original.

        if isinstance(role, string_types):
            name = None
            scm = None
            src = None
            version = None
            if ',' in role:
                if role.count(',') == 1:
                    (src, version) = role.strip().split(',', 1)
                elif role.count(',') == 2:
                    (src, version, name) = role.strip().split(',', 2)
                else:
                    raise AnsibleError("Invalid role line (%s). Proper format is 'role_name[,version[,name]]'" % role)
            else:
                src = role

            if name is None:
                name = RoleRequirement.repo_url_to_role_name(src)
            if '+' in src:
                # e.g. 'git+https://...' -> scm='git', src='https://...'
                (scm, src) = src.split('+', 1)

            return dict(name=name, src=src, scm=scm, version=version)

        if 'role' in role:
            name = role['role']
            if ',' in name:
                raise AnsibleError("Invalid old style role requirement: %s" % name)
            else:
                del role['role']
                role['name'] = name
        else:
            role = role.copy()

            if 'src' in role:
                # New style: { src: 'galaxy.role,version,name', other_vars: "here" }
                if 'github.com' in role["src"] and 'http' in role["src"] and '+' not in role["src"] and not role["src"].endswith('.tar.gz'):
                    role["src"] = "git+" + role["src"]

                if '+' in role["src"]:
                    role["scm"], dummy, role["src"] = role["src"].partition('+')

                if 'name' not in role:
                    role["name"] = RoleRequirement.repo_url_to_role_name(role["src"])

            if 'version' not in role:
                role['version'] = ''

            if 'scm' not in role:
                role['scm'] = None

        # drop any keys that are not part of the canonical role spec
        for key in list(role.keys()):
            if key not in VALID_SPEC_KEYS:
                role.pop(key)

        return role

    @staticmethod
    def scm_archive_role(src, scm='git', name=None, version='HEAD', keep_scm_meta=False):
        # Thin wrapper delegating to ansible.utils.galaxy.scm_archive_resource.
        return scm_archive_resource(src, scm=scm, name=name, version=version, keep_scm_meta=keep_scm_meta)
class IncludeRole(TaskInclude):

    """
    A Role include is derived from a regular role to handle the special
    circumstances related to the `- include_role: ...`
    """

    BASE = frozenset(('name', 'role'))  # directly assigned
    FROM_ARGS = frozenset(('tasks_from', 'vars_from', 'defaults_from', 'handlers_from'))  # used to populate from dict in role
    OTHER_ARGS = frozenset(('apply', 'public', 'allow_duplicates', 'rolespec_validate'))  # assigned to matching property
    VALID_ARGS = BASE | FROM_ARGS | OTHER_ARGS  # all valid args

    # =================================================================================
    # ATTRIBUTES

    # private as this is a 'module options' vs a task property
    allow_duplicates = FieldAttribute(isa='bool', default=True, private=True)
    public = FieldAttribute(isa='bool', default=False, private=True)
    rolespec_validate = FieldAttribute(isa='bool', default=True)

    def __init__(self, block=None, role=None, task_include=None):

        super(IncludeRole, self).__init__(block=block, role=role, task_include=task_include)

        self._from_files = {}       # maps 'tasks'/'vars'/... to the file basename requested via *_from args
        self._parent_role = role    # role that contained this include, if any
        self._role_name = None      # set in load() from the 'name'/'role' arg
        self._role_path = None      # filled in by get_block_list() once the role is resolved

    def get_name(self):
        ''' return the name of the task '''
        return self.name or "%s : %s" % (self.action, self._role_name)

    def get_block_list(self, play=None, variable_manager=None, loader=None):
        # Resolve and compile the included role, returning its (blocks, handlers).

        # only need play passed in when dynamic
        if play is None:
            myplay = self._parent._play
        else:
            myplay = play

        ri = RoleInclude.load(self._role_name, play=myplay, variable_manager=variable_manager, loader=loader, collection_list=self.collections)
        ri.vars |= self.vars

        if variable_manager is not None:
            available_variables = variable_manager.get_vars(play=myplay, task=self)
        else:
            available_variables = {}
        templar = Templar(loader=loader, variables=available_variables)
        # the *_from file names may themselves contain variables
        from_files = templar.template(self._from_files)

        # build role
        actual_role = Role.load(ri, myplay, parent_role=self._parent_role, from_files=from_files,
                                from_include=True, validate=self.rolespec_validate)
        actual_role._metadata.allow_duplicates = self.allow_duplicates

        if self.statically_loaded or self.public:
            myplay.roles.append(actual_role)

        # save this for later use
        self._role_path = actual_role._role_path

        # compile role with parent roles as dependencies to ensure they inherit
        # variables
        if not self._parent_role:
            dep_chain = []
        else:
            dep_chain = list(self._parent_role._parents)
            dep_chain.append(self._parent_role)

        p_block = self.build_parent_block()

        # collections value is not inherited; override with the value we calculated during role setup
        p_block.collections = actual_role.collections

        blocks = actual_role.compile(play=myplay, dep_chain=dep_chain)
        for b in blocks:
            b._parent = p_block
            # HACK: parent inheritance doesn't seem to have a way to handle this intermediate override until squashed/finalized
            b.collections = actual_role.collections

        # updated available handlers in play
        handlers = actual_role.get_handler_blocks(play=myplay)
        for h in handlers:
            h._parent = p_block
        myplay.handlers = myplay.handlers + handlers
        return blocks, handlers

    @staticmethod
    def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
        # Parse an include_role/import_role task and validate its options.

        ir = IncludeRole(block, role, task_include=task_include).load_data(data, variable_manager=variable_manager, loader=loader)

        # Validate options
        my_arg_names = frozenset(ir.args.keys())

        # name is needed, or use role as alias
        ir._role_name = ir.args.get('name', ir.args.get('role'))
        if ir._role_name is None:
            raise AnsibleParserError("'name' is a required field for %s." % ir.action, obj=data)

        # 'public' only makes sense for the dynamic include_role variants
        if 'public' in ir.args and ir.action not in C._ACTION_INCLUDE_ROLE:
            raise AnsibleParserError('Invalid options for %s: public' % ir.action, obj=data)

        # validate bad args, otherwise we silently ignore
        bad_opts = my_arg_names.difference(IncludeRole.VALID_ARGS)
        if bad_opts:
            raise AnsibleParserError('Invalid options for %s: %s' % (ir.action, ','.join(list(bad_opts))), obj=data)

        # build options for role includes
        for key in my_arg_names.intersection(IncludeRole.FROM_ARGS):
            from_key = key.removesuffix('_from')
            args_value = ir.args.get(key)
            if not isinstance(args_value, string_types):
                raise AnsibleParserError('Expected a string for %s but got %s instead' % (key, type(args_value)))
            # basename() prevents path traversal outside the role's directories
            ir._from_files[from_key] = basename(args_value)

        apply_attrs = ir.args.get('apply', {})
        if apply_attrs and ir.action not in C._ACTION_INCLUDE_ROLE:
            raise AnsibleParserError('Invalid options for %s: apply' % ir.action, obj=data)
        elif not isinstance(apply_attrs, dict):
            raise AnsibleParserError('Expected a dict for apply but got %s instead' % type(apply_attrs), obj=data)

        # manual list as otherwise the options would set other task parameters we don't want.
        for option in my_arg_names.intersection(IncludeRole.OTHER_ARGS):
            setattr(ir, option, ir.args.get(option))

        return ir

    def copy(self, exclude_parent=False, exclude_tasks=False):
        # Copy, preserving the include-specific bookkeeping fields.

        new_me = super(IncludeRole, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
        new_me.statically_loaded = self.statically_loaded
        new_me._from_files = self._from_files.copy()
        new_me._parent_role = self._parent_role
        new_me._role_name = self._role_name
        new_me._role_path = self._role_path

        return new_me

    def get_include_params(self):
        # Extend the task include params with the parent role's params and the
        # ancestor-role bookkeeping variables.
        v = super(IncludeRole, self).get_include_params()
        if self._parent_role:
            v |= self._parent_role.get_role_params()
            v.setdefault('ansible_parent_role_names', []).insert(0, self._parent_role.get_name())
            v.setdefault('ansible_parent_role_paths', []).insert(0, self._parent_role._role_path)
        return v
+ +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible.errors import AnsibleError +from ansible.module_utils.six import string_types +from ansible.playbook.attribute import FieldAttribute +from ansible.template import Templar + + +class Taggable: + + untagged = frozenset(['untagged']) + tags = FieldAttribute(isa='list', default=list, listof=(string_types, int), extend=True) + + def _load_tags(self, attr, ds): + if isinstance(ds, list): + return ds + elif isinstance(ds, string_types): + value = ds.split(',') + if isinstance(value, list): + return [x.strip() for x in value] + else: + return [ds] + else: + raise AnsibleError('tags must be specified as a list', obj=ds) + + def evaluate_tags(self, only_tags, skip_tags, all_vars): + ''' this checks if the current item should be executed depending on tag options ''' + + if self.tags: + templar = Templar(loader=self._loader, variables=all_vars) + tags = templar.template(self.tags) + + _temp_tags = set() + for tag in tags: + if isinstance(tag, list): + _temp_tags.update(tag) + else: + _temp_tags.add(tag) + tags = _temp_tags + self.tags = list(tags) + else: + # this makes isdisjoint work for untagged + tags = self.untagged + + should_run = True # default, tasks to run + + if only_tags: + if 'always' in tags: + should_run = True + elif ('all' in only_tags and 'never' not in tags): + should_run = True + elif not tags.isdisjoint(only_tags): + should_run = True + elif 'tagged' in only_tags and tags != self.untagged and 'never' not in tags: + should_run = True + else: + should_run = False + + if should_run and skip_tags: + + # Check for tags that we need to skip + if 'all' in skip_tags: + if 'always' not in tags or 'always' in skip_tags: + should_run = False + elif not tags.isdisjoint(skip_tags): + should_run = False + elif 'tagged' in skip_tags and tags != self.untagged: + should_run = False + + return should_run diff --git 
a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py new file mode 100644 index 0000000..6a9136d --- /dev/null +++ b/lib/ansible/playbook/task.py @@ -0,0 +1,511 @@ +# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible import constants as C +from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleAssertionError +from ansible.module_utils._text import to_native +from ansible.module_utils.six import string_types +from ansible.parsing.mod_args import ModuleArgsParser +from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping +from ansible.plugins.loader import lookup_loader +from ansible.playbook.attribute import FieldAttribute, NonInheritableFieldAttribute +from ansible.playbook.base import Base +from ansible.playbook.block import Block +from ansible.playbook.collectionsearch import CollectionSearch +from ansible.playbook.conditional import Conditional +from ansible.playbook.loop_control import LoopControl +from ansible.playbook.role import Role +from ansible.playbook.taggable import Taggable +from ansible.utils.collection_loader import AnsibleCollectionConfig +from ansible.utils.display import Display 
class Task(Base, Conditional, Taggable, CollectionSearch):

    """
    A task is a language feature that represents a call to a module, with given arguments and other parameters.
    A handler is a subclass of a task.

    Usage:

       Task.load(datastructure) -> Task
       Task.something(...)
    """

    # =================================================================================
    # ATTRIBUTES
    # load_<attribute_name> and
    # validate_<attribute_name>
    # will be used if defined
    # might be possible to define others

    # NOTE: ONLY set defaults on task attributes that are not inheritable,
    # inheritance is only triggered if the 'current value' is Sentinel,
    # default can be set at play/top level object and inheritance will take it's course.

    args = FieldAttribute(isa='dict', default=dict)
    action = FieldAttribute(isa='string')

    async_val = FieldAttribute(isa='int', default=0, alias='async')
    changed_when = FieldAttribute(isa='list', default=list)
    delay = FieldAttribute(isa='int', default=5)
    delegate_to = FieldAttribute(isa='string')
    delegate_facts = FieldAttribute(isa='bool')
    failed_when = FieldAttribute(isa='list', default=list)
    loop = FieldAttribute()
    loop_control = NonInheritableFieldAttribute(isa='class', class_type=LoopControl, default=LoopControl)
    notify = FieldAttribute(isa='list')
    poll = FieldAttribute(isa='int', default=C.DEFAULT_POLL_INTERVAL)
    register = FieldAttribute(isa='string', static=True)
    retries = FieldAttribute(isa='int', default=3)
    until = FieldAttribute(isa='list', default=list)

    # deprecated, used to be loop and loop_args but loop has been repurposed
    loop_with = NonInheritableFieldAttribute(isa='string', private=True)

    def __init__(self, block=None, role=None, task_include=None):
        ''' constructors a task, without the Task.load classmethod, it will be pretty blank '''

        self._role = role
        self._parent = None
        self.implicit = False
        self.resolved_action = None

        # a task include takes precedence over the containing block as parent
        if task_include:
            self._parent = task_include
        else:
            self._parent = block

        super(Task, self).__init__()

    def get_name(self, include_role_fqcn=True):
        ''' return the name of the task '''

        if self._role:
            role_name = self._role.get_name(include_role_fqcn=include_role_fqcn)

        if self._role and self.name:
            return "%s : %s" % (role_name, self.name)
        elif self.name:
            return self.name
        else:
            # fall back to the action name when the task is unnamed
            if self._role:
                return "%s : %s" % (role_name, self.action)
            else:
                return "%s" % (self.action,)

    def _merge_kv(self, ds):
        # Flatten a dict of module args into a 'k=v k=v' string; private
        # (underscore-prefixed) keys are skipped. Returns ds unchanged when it
        # is already a string, and "" for None.
        if ds is None:
            return ""
        elif isinstance(ds, string_types):
            return ds
        elif isinstance(ds, dict):
            buf = ""
            for (k, v) in ds.items():
                if k.startswith('_'):
                    continue
                buf = buf + "%s=%s " % (k, v)
            buf = buf.strip()
            return buf

    @staticmethod
    def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
        # Canonical constructor: build a Task and populate it from the raw
        # YAML datastructure.
        t = Task(block=block, role=role, task_include=task_include)
        return t.load_data(data, variable_manager=variable_manager, loader=loader)

    def __repr__(self):
        ''' returns a human readable representation of the task '''
        if self.get_name() in C._ACTION_META:
            return "TASK: meta (%s)" % self.args['_raw_params']
        else:
            return "TASK: %s" % self.get_name()

    def _preprocess_with_loop(self, ds, new_ds, k, v):
        ''' take a lookup plugin name and store it correctly '''

        loop_name = k.removeprefix("with_")
        if new_ds.get('loop') is not None or new_ds.get('loop_with') is not None:
            raise AnsibleError("duplicate loop in task: %s" % loop_name, obj=ds)
        if v is None:
            raise AnsibleError("you must specify a value when using %s" % k, obj=ds)
        new_ds['loop_with'] = loop_name
        new_ds['loop'] = v
        # display.deprecated("with_ type loops are being phased out, use the 'loop' keyword instead",
        #                    version="2.10", collection_name='ansible.builtin')

    def preprocess_data(self, ds):
        '''
        tasks are especially complex arguments so need pre-processing.
        keep it short.
        '''

        if not isinstance(ds, dict):
            raise AnsibleAssertionError('ds (%s) should be a dict but was a %s' % (ds, type(ds)))

        # the new, cleaned datastructure, which will have legacy
        # items reduced to a standard structure suitable for the
        # attributes of the task class
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            # preserve the YAML file/line position for error reporting
            new_ds.ansible_pos = ds.ansible_pos

        # since this affects the task action parsing, we have to resolve in preprocess instead of in typical validator
        default_collection = AnsibleCollectionConfig.default_collection

        collections_list = ds.get('collections')
        if collections_list is None:
            # use the parent value if our ds doesn't define it
            collections_list = self.collections
        else:
            # Validate this untemplated field early on to guarantee we are dealing with a list.
            # This is also done in CollectionSearch._load_collections() but this runs before that call.
            collections_list = self.get_validated_value('collections', self.fattributes.get('collections'), collections_list, None)

        if default_collection and not self._role:  # FIXME: and not a collections role
            if collections_list:
                if default_collection not in collections_list:
                    collections_list.insert(0, default_collection)
            else:
                collections_list = [default_collection]

        if collections_list and 'ansible.builtin' not in collections_list and 'ansible.legacy' not in collections_list:
            collections_list.append('ansible.legacy')

        if collections_list:
            ds['collections'] = collections_list

        # use the args parsing class to determine the action, args,
        # and the delegate_to value from the various possible forms
        # supported as legacy
        args_parser = ModuleArgsParser(task_ds=ds, collection_list=collections_list)
        try:
            (action, args, delegate_to) = args_parser.parse()
        except AnsibleParserError as e:
            # if the raises exception was created with obj=ds args, then it includes the detail
            # so we dont need to add it so we can just re raise.
            if e.obj:
                raise
            # But if it wasn't, we can add the yaml object now to get more detail
            raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)
        else:
            self.resolved_action = args_parser.resolved_action

        # the command/shell/script modules used to support the `cmd` arg,
        # which corresponds to what we now call _raw_params, so move that
        # value over to _raw_params (assuming it is empty)
        if action in C._ACTION_HAS_CMD:
            if 'cmd' in args:
                if args.get('_raw_params', '') != '':
                    raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
                                       " Please put everything in one or the other place.", obj=ds)
                args['_raw_params'] = args.pop('cmd')

        new_ds['action'] = action
        new_ds['args'] = args
        new_ds['delegate_to'] = delegate_to

        # we handle any 'vars' specified in the ds here, as we may
        # be adding things to them below (special handling for includes).
        # When that deprecated feature is removed, this can be too.
        if 'vars' in ds:
            # _load_vars is defined in Base, and is used to load a dictionary
            # or list of dictionaries in a standard way
            new_ds['vars'] = self._load_vars(None, ds.get('vars'))
        else:
            new_ds['vars'] = dict()

        for (k, v) in ds.items():
            if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
                # we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
                continue
            elif k.startswith('with_') and k.removeprefix("with_") in lookup_loader:
                # transform into loop property
                self._preprocess_with_loop(ds, new_ds, k, v)
            elif C.INVALID_TASK_ATTRIBUTE_FAILED or k in self.fattributes:
                new_ds[k] = v
            else:
                display.warning("Ignoring invalid attribute: %s" % k)

        return super(Task, self).preprocess_data(new_ds)

    def _load_loop_control(self, attr, ds):
        # loop_control must be a literal dict; a variable reference would not
        # be resolvable at parse time.
        if not isinstance(ds, dict):
            raise AnsibleParserError(
                "the `loop_control` value must be specified as a dictionary and cannot "
                "be a variable itself (though it can contain variables)",
                obj=ds,
            )

        return LoopControl.load(data=ds, variable_manager=self._variable_manager, loader=self._loader)

    def _validate_attributes(self, ds):
        # Augment the generic attribute-validation error with a hint about the
        # config option that downgrades it to a warning.
        try:
            super(Task, self)._validate_attributes(ds)
        except AnsibleParserError as e:
            e.message += '\nThis error can be suppressed as a warning using the "invalid_task_attribute_failed" configuration'
            raise e

    def _validate_changed_when(self, attr, name, value):
        # coerce a scalar changed_when into a single-element list
        if not isinstance(value, list):
            setattr(self, name, [value])

    def _validate_failed_when(self, attr, name, value):
        # coerce a scalar failed_when into a single-element list
        if not isinstance(value, list):
            setattr(self, name, [value])

    def post_validate(self, templar):
        '''
        Override of base class post_validate, to also do final validation on
        the block and task include (if any) to which this task belongs.
        '''

        if self._parent:
            self._parent.post_validate(templar)

        if AnsibleCollectionConfig.default_collection:
            pass

        super(Task, self).post_validate(templar)

    def _post_validate_loop(self, attr, value, templar):
        '''
        Override post validation for the loop field, which is templated
        specially in the TaskExecutor class when evaluating loops.
        '''
        return value

    def _post_validate_environment(self, attr, value, templar):
        '''
        Override post validation of vars on the play, as we don't want to
        template these too early.
        '''
        env = {}
        if value is not None:

            def _parse_env_kv(k, v):
                try:
                    env[k] = templar.template(v, convert_bare=False)
                except AnsibleUndefinedVariable as e:
                    error = to_native(e)
                    # NOTE(review): 'and' binds tighter than 'or' here, so any
                    # 'ansible_env' undefined-variable error is swallowed for
                    # EVERY action, not just fact-gathering ones — confirm intended.
                    if self.action in C._ACTION_FACT_GATHERING and 'ansible_facts.env' in error or 'ansible_env' in error:
                        # ignore as fact gathering is required for 'env' facts
                        return
                    raise

            if isinstance(value, list):
                for env_item in value:
                    if isinstance(env_item, dict):
                        for k in env_item:
                            _parse_env_kv(k, env_item[k])
                    else:
                        # list item is (presumably) a template that should
                        # resolve to a dict of env vars
                        isdict = templar.template(env_item, convert_bare=False)
                        if isinstance(isdict, dict):
                            env |= isdict
                        else:
                            display.warning("could not parse environment value, skipping: %s" % value)

            elif isinstance(value, dict):
                # should not really happen
                env = dict()
                for env_item in value:
                    _parse_env_kv(env_item, value[env_item])
            else:
                # at this point it should be a simple string, also should not happen
                env = templar.template(value, convert_bare=False)

        return env

    def _post_validate_changed_when(self, attr, value, templar):
        '''
        changed_when is evaluated after the execution of the task is complete,
        and should not be templated during the regular post_validate step.
        '''
        return value

    def _post_validate_failed_when(self, attr, value, templar):
        '''
        failed_when is evaluated after the execution of the task is complete,
        and should not be templated during the regular post_validate step.
        '''
        return value

    def _post_validate_until(self, attr, value, templar):
        '''
        until is evaluated after the execution of the task is complete,
        and should not be templated during the regular post_validate step.
        '''
        return value

    def get_vars(self):
        # Merge vars from the parent chain with this task's own vars;
        # 'tags' and 'when' are keywords, not variables, so remove them.
        all_vars = dict()
        if self._parent:
            all_vars |= self._parent.get_vars()

        all_vars |= self.vars

        if 'tags' in all_vars:
            del all_vars['tags']
        if 'when' in all_vars:
            del all_vars['when']

        return all_vars

    def get_include_params(self):
        # Params passed down into included files; task vars are only included
        # when this task is itself an include-type action.
        all_vars = dict()
        if self._parent:
            all_vars |= self._parent.get_include_params()
        if self.action in C._ACTION_ALL_INCLUDES:
            all_vars |= self.vars
        return all_vars

    def copy(self, exclude_parent=False, exclude_tasks=False):
        # Copy this task; the parent chain is copied too (unless excluded),
        # while the role reference is shared, not copied.
        new_me = super(Task, self).copy()

        new_me._parent = None
        if self._parent and not exclude_parent:
            new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)

        new_me._role = None
        if self._role:
            new_me._role = self._role

        new_me.implicit = self.implicit
        new_me.resolved_action = self.resolved_action
        # keep the same uuid so the copy is treated as the same task
        new_me._uuid = self._uuid

        return new_me

    def serialize(self):
        # Extend Base.serialize with parent/role/bookkeeping data, but only
        # when the object has not already been squashed/finalized.
        data = super(Task, self).serialize()

        if not self._squashed and not self._finalized:
            if self._parent:
                data['parent'] = self._parent.serialize()
                data['parent_type'] = self._parent.__class__.__name__

            if self._role:
                data['role'] = self._role.serialize()

            data['implicit'] = self.implicit
            data['resolved_action'] = self.resolved_action

        return data

    def deserialize(self, data):
        # Rebuild parent and role objects from their serialized form, then let
        # Base restore the remaining attributes.

        # import is here to avoid import loops
        from ansible.playbook.task_include import TaskInclude
        from ansible.playbook.handler_task_include import HandlerTaskInclude

        parent_data = data.get('parent', None)
        if parent_data:
            parent_type = data.get('parent_type')
            if parent_type == 'Block':
                p = Block()
            elif parent_type == 'TaskInclude':
                p = TaskInclude()
            elif parent_type == 'HandlerTaskInclude':
                p = HandlerTaskInclude()
            p.deserialize(parent_data)
            self._parent = p
            del data['parent']

        role_data = data.get('role')
        if role_data:
            r = Role()
            r.deserialize(role_data)
            self._role = r
            del data['role']

        self.implicit = data.get('implicit', False)
        self.resolved_action = data.get('resolved_action')

        super(Task, self).deserialize(data)

    def set_loader(self, loader):
        '''
        Sets the loader on this object and recursively on parent, child objects.
        This is used primarily after the Task has been serialized/deserialized, which
        does not preserve the loader.
        '''

        self._loader = loader

        if self._parent:
            self._parent.set_loader(loader)

    def _get_parent_attribute(self, attr, omit=False):
        '''
        Generic logic to get the attribute or parent attribute for a task value.
        '''
        fattr = self.fattributes[attr]

        extend = fattr.extend
        prepend = fattr.prepend

        try:
            # omit self, and only get parent values
            if omit:
                value = Sentinel
            else:
                value = getattr(self, f'_{attr}', Sentinel)

            # If parent is static, we can grab attrs from the parent
            # otherwise, defer to the grandparent
            if getattr(self._parent, 'statically_loaded', True):
                _parent = self._parent
            else:
                _parent = self._parent._parent

            if _parent and (value is Sentinel or extend):
                if getattr(_parent, 'statically_loaded', True):
                    # vars are always inheritable, other attributes might not be for the parent but still should be for other ancestors
                    if attr != 'vars' and hasattr(_parent, '_get_parent_attribute'):
                        parent_value = _parent._get_parent_attribute(attr)
                    else:
                        parent_value = getattr(_parent, f'_{attr}', Sentinel)

                    if extend:
                        value = self._extend_value(value, parent_value, prepend)
                    else:
                        value = parent_value
        except KeyError:
            pass

        return value

    def all_parents_static(self):
        # A plain Task is always static; walk up the chain so any dynamic
        # include ancestor (which overrides this) makes the answer False.
        if self._parent:
            return self._parent.all_parents_static()
        return True

    def get_first_parent_include(self):
        # Return the nearest TaskInclude ancestor, or None.
        from ansible.playbook.task_include import TaskInclude
        if self._parent:
            if isinstance(self._parent, TaskInclude):
                return self._parent
            return self._parent.get_first_parent_include()
        return None
# (closes the TaskInclude class docstring opened in the previous chunk)
    """

    BASE = frozenset(('file', '_raw_params'))  # directly assigned
    OTHER_ARGS = frozenset(('apply',))  # assigned to matching property
    VALID_ARGS = BASE.union(OTHER_ARGS)  # all valid args
    # keywords a user may legally place on an include task itself (everything
    # else is rejected or warned about in preprocess_data below)
    VALID_INCLUDE_KEYWORDS = frozenset(('action', 'args', 'collections', 'debugger', 'ignore_errors', 'loop', 'loop_control',
                                        'loop_with', 'name', 'no_log', 'register', 'run_once', 'tags', 'timeout', 'vars',
                                        'when'))

    def __init__(self, block=None, role=None, task_include=None):
        super(TaskInclude, self).__init__(block=block, role=role, task_include=task_include)
        # starts out dynamic; flipped to True elsewhere when the include is
        # resolved statically (import-style)
        self.statically_loaded = False

    @staticmethod
    def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
        # Factory: parse the raw datastructure into a TaskInclude, then run the
        # shared include-option validation before handing the task back.
        ti = TaskInclude(block=block, role=role, task_include=task_include)
        task = ti.check_options(
            ti.load_data(data, variable_manager=variable_manager, loader=loader),
            data
        )

        return task

    def check_options(self, task, data):
        '''
        Method for options validation to use in 'load_data' for TaskInclude and HandlerTaskInclude
        since they share the same validations. It is not named 'validate_options' on purpose
        to prevent confusion with '_validate_*" methods. Note that the task passed might be changed
        as a side-effect of this method.
# (closes the check_options docstring opened in the previous chunk)
        '''
        my_arg_names = frozenset(task.args.keys())

        # validate bad args, otherwise we silently ignore
        bad_opts = my_arg_names.difference(self.VALID_ARGS)
        if bad_opts and task.action in C._ACTION_ALL_PROPER_INCLUDE_IMPORT_TASKS:
            raise AnsibleParserError('Invalid options for %s: %s' % (task.action, ','.join(list(bad_opts))), obj=data)

        if not task.args.get('_raw_params'):
            # normalize: an explicit 'file:' option becomes the raw params
            task.args['_raw_params'] = task.args.pop('file', None)
            if not task.args['_raw_params']:
                raise AnsibleParserError('No file specified for %s' % task.action)

        # 'apply' (a dict of attributes wrapped around the included tasks) is
        # only meaningful for include_tasks-style dynamic includes
        apply_attrs = task.args.get('apply', {})
        if apply_attrs and task.action not in C._ACTION_INCLUDE_TASKS:
            raise AnsibleParserError('Invalid options for %s: apply' % task.action, obj=data)
        elif not isinstance(apply_attrs, dict):
            raise AnsibleParserError('Expected a dict for apply but got %s instead' % type(apply_attrs), obj=data)

        return task

    def preprocess_data(self, ds):
        ds = super(TaskInclude, self).preprocess_data(ds)

        # reject (or warn about) any keyword that is not valid on an include
        diff = set(ds.keys()).difference(self.VALID_INCLUDE_KEYWORDS)
        for k in diff:
            # This check doesn't handle ``include`` as we have no idea at this point if it is static or not
            if ds[k] is not Sentinel and ds['action'] in C._ACTION_ALL_INCLUDE_ROLE_TASKS:
                if C.INVALID_TASK_ATTRIBUTE_FAILED:
                    raise AnsibleParserError("'%s' is not a valid attribute for a %s" % (k, self.__class__.__name__), obj=ds)
                else:
                    display.warning("Ignoring invalid attribute: %s" % k)

        return ds

    def copy(self, exclude_parent=False, exclude_tasks=False):
        new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
        # statically_loaded is not a field attribute, so carry it over manually
        new_me.statically_loaded = self.statically_loaded
        return new_me

    def get_vars(self):
        '''
        We override the parent Task() classes get_vars here because
        we need to include the args of the include into the vars as
        they are params to the included tasks.
        But ONLY for 'include'
        '''
        if self.action not in C._ACTION_INCLUDE:
            # non-legacy includes (include_tasks/import_tasks/...) use the
            # normal task variable resolution
            all_vars = super(TaskInclude, self).get_vars()
        else:
            all_vars = dict()
            if self._parent:
                all_vars |= self._parent.get_vars()

            all_vars |= self.vars
            # the include's args become variables for the included tasks
            all_vars |= self.args

            # drop keywords that were merged in above but must not leak down
            # as variables (they are visibly deleted here; they remain task
            # keywords rather than include parameters)
            if 'tags' in all_vars:
                del all_vars['tags']
            if 'when' in all_vars:
                del all_vars['when']

        return all_vars

    def build_parent_block(self):
        '''
        This method is used to create the parent block for the included tasks
        when ``apply`` is specified
        '''
        # pop (not get): consume 'apply' so it is not processed again later
        apply_attrs = self.args.pop('apply', {})
        if apply_attrs:
            # wrap the included tasks in a synthetic Block carrying the
            # user-supplied attributes, parented to this include
            apply_attrs['block'] = []
            p_block = Block.load(
                apply_attrs,
                play=self._parent._play,
                task_include=self,
                role=self._role,
                variable_manager=self._variable_manager,
                loader=self._loader,
            )
        else:
            # no apply: the include itself serves as the parent
            p_block = self

        return p_block