author    Daniel Baumann <daniel.baumann@progress-linux.org>    2024-04-28 16:04:21 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>    2024-04-28 16:04:21 +0000
commit    8a754e0858d922e955e71b253c139e071ecec432 (patch)
tree      527d16e74bfd1840c85efd675fdecad056c54107 /lib/ansible/vars
parent    Initial commit. (diff)
Adding upstream version 2.14.3. (upstream/2.14.3, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'lib/ansible/vars')
-rw-r--r--  lib/ansible/vars/__init__.py      0
-rw-r--r--  lib/ansible/vars/clean.py       171
-rw-r--r--  lib/ansible/vars/fact_cache.py   72
-rw-r--r--  lib/ansible/vars/hostvars.py    155
-rw-r--r--  lib/ansible/vars/manager.py     749
-rw-r--r--  lib/ansible/vars/plugins.py     114
-rw-r--r--  lib/ansible/vars/reserved.py     84
7 files changed, 1345 insertions, 0 deletions
diff --git a/lib/ansible/vars/__init__.py b/lib/ansible/vars/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/ansible/vars/__init__.py
diff --git a/lib/ansible/vars/clean.py b/lib/ansible/vars/clean.py
new file mode 100644
index 0000000..1de6fcf
--- /dev/null
+++ b/lib/ansible/vars/clean.py
@@ -0,0 +1,171 @@
+# Copyright (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import re
+
+from collections.abc import MutableMapping, MutableSequence
+
+from ansible import constants as C
+from ansible.errors import AnsibleError
+from ansible.module_utils import six
+from ansible.module_utils._text import to_text
+from ansible.plugins.loader import connection_loader
+from ansible.utils.display import Display
+
+display = Display()
+
+
+def module_response_deepcopy(v):
+ """Function to create a deep copy of module response data
+
+ Designed to be used within the Ansible "engine" to improve performance
+ issues where ``copy.deepcopy`` was used previously, largely with CPU
+ and memory contention.
+
+ This only supports the following data types, and was designed to only
+ handle specific workloads:
+
+ * ``dict``
+ * ``list``
+
+ The data we pass here will come from a serialization such
+ as JSON, so we shouldn't have need for other data types such as
+ ``set`` or ``tuple``.
+
+ Take note that this function should not be used extensively as a
+ replacement for ``deepcopy`` due to the naive way in which this
+ handles other data types.
+
+ Do not expect uses outside of those listed below to maintain
+ backwards compatibility, in case we need to extend this function
+ to handle our specific needs:
+
+ * ``ansible.executor.task_result.TaskResult.clean_copy``
+ * ``ansible.vars.clean.clean_facts``
+ * ``ansible.vars.namespace_facts``
+ """
+ if isinstance(v, dict):
+ ret = v.copy()
+ items = six.iteritems(ret)
+ elif isinstance(v, list):
+ ret = v[:]
+ items = enumerate(ret)
+ else:
+ return v
+
+ for key, value in items:
+ if isinstance(value, (dict, list)):
+ ret[key] = module_response_deepcopy(value)
+ else:
+ ret[key] = value
+
+ return ret
+
+
+def strip_internal_keys(dirty, exceptions=None):
+ # All keys starting with _ansible_ are internal, so change the 'dirty' mapping and remove them.
+
+ if exceptions is None:
+ exceptions = tuple()
+
+ if isinstance(dirty, MutableSequence):
+
+ for element in dirty:
+ if isinstance(element, (MutableMapping, MutableSequence)):
+ strip_internal_keys(element, exceptions=exceptions)
+
+ elif isinstance(dirty, MutableMapping):
+
+ # listify to avoid updating dict while iterating over it
+ for k in list(dirty.keys()):
+ if isinstance(k, six.string_types):
+ if k.startswith('_ansible_') and k not in exceptions:
+ del dirty[k]
+ continue
+
+ if isinstance(dirty[k], (MutableMapping, MutableSequence)):
+ strip_internal_keys(dirty[k], exceptions=exceptions)
+ else:
+ raise AnsibleError("Cannot strip invalid keys from %s" % type(dirty))
+
+ return dirty
+
+
+def remove_internal_keys(data):
+ '''
+ More nuanced version of strip_internal_keys
+ '''
+ for key in list(data.keys()):
+ if (key.startswith('_ansible_') and key != '_ansible_parsed') or key in C.INTERNAL_RESULT_KEYS:
+ display.warning("Removed unexpected internal key in module return: %s = %s" % (key, data[key]))
+ del data[key]
+
+ # remove bad/empty internal keys
+ for key in ['warnings', 'deprecations']:
+ if key in data and not data[key]:
+ del data[key]
+
+ # cleanse fact values that are allowed from actions but not modules
+ for key in list(data.get('ansible_facts', {}).keys()):
+ if key.startswith('discovered_interpreter_') or key.startswith('ansible_discovered_interpreter_'):
+ del data['ansible_facts'][key]
+
+
+def clean_facts(facts):
+ ''' remove facts that can override internal keys or otherwise deemed unsafe '''
+ data = module_response_deepcopy(facts)
+
+ remove_keys = set()
+ fact_keys = set(data.keys())
+ # first we add all of our magic variable names to the set of
+ # keys we want to remove from facts
+ # NOTE: these will eventually disappear in favor of others below
+ for magic_var in C.MAGIC_VARIABLE_MAPPING:
+ remove_keys.update(fact_keys.intersection(C.MAGIC_VARIABLE_MAPPING[magic_var]))
+
+ # remove common connection vars
+ remove_keys.update(fact_keys.intersection(C.COMMON_CONNECTION_VARS))
+
+ # next we remove any connection plugin specific vars
+ for conn_path in connection_loader.all(path_only=True):
+ conn_name = os.path.splitext(os.path.basename(conn_path))[0]
+ re_key = re.compile('^ansible_%s_' % re.escape(conn_name))
+ for fact_key in fact_keys:
+ # most lightweight VM or container tech creates devices with this pattern, this avoids filtering them out
+ if (re_key.match(fact_key) and not fact_key.endswith(('_bridge', '_gwbridge'))) or fact_key.startswith('ansible_become_'):
+ remove_keys.add(fact_key)
+
+ # remove some KNOWN keys
+ for hard in C.RESTRICTED_RESULT_KEYS + C.INTERNAL_RESULT_KEYS:
+ if hard in fact_keys:
+ remove_keys.add(hard)
+
+ # finally, we search for interpreter keys to remove
+ re_interp = re.compile('^ansible_.*_interpreter$')
+ for fact_key in fact_keys:
+ if re_interp.match(fact_key):
+ remove_keys.add(fact_key)
+ # then we remove them (except for ssh host keys)
+ for r_key in remove_keys:
+ if not r_key.startswith('ansible_ssh_host_key_'):
+ display.warning("Removed restricted key from module data: %s" % (r_key))
+ del data[r_key]
+
+ return strip_internal_keys(data)
+
+
+def namespace_facts(facts):
+ ''' return all facts inside 'ansible_facts' w/o an ansible_ prefix '''
+ deprefixed = {}
+ for k in facts:
+ if k.startswith('ansible_') and k not in ('ansible_local',):
+ deprefixed[k[8:]] = module_response_deepcopy(facts[k])
+ else:
+ deprefixed[k] = module_response_deepcopy(facts[k])
+
+ return {'ansible_facts': deprefixed}
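
For illustration, a minimal sketch (not part of the upstream patch) of how the clean.py helpers above behave on a typical module result, assuming ansible-core 2.14 is importable; the keys and fact values are hypothetical:

from ansible.vars.clean import module_response_deepcopy, strip_internal_keys, namespace_facts

result = {
    '_ansible_no_log': False,                                 # internal key, to be stripped
    'changed': True,
    'ansible_facts': {'ansible_fqdn': 'web01.example.com'},   # hypothetical fact
}

result_copy = module_response_deepcopy(result)   # copies only dicts/lists, cheaper than copy.deepcopy
assert result_copy is not result
assert result_copy['ansible_facts'] is not result['ansible_facts']

strip_internal_keys(result_copy)                 # mutates in place, drops '_ansible_*' keys
# result_copy == {'changed': True, 'ansible_facts': {'ansible_fqdn': 'web01.example.com'}}

namespace_facts(result_copy['ansible_facts'])
# {'ansible_facts': {'fqdn': 'web01.example.com'}}  -- the 'ansible_' prefix is removed
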
diff --git a/lib/ansible/vars/fact_cache.py b/lib/ansible/vars/fact_cache.py
new file mode 100644
index 0000000..868a905
--- /dev/null
+++ b/lib/ansible/vars/fact_cache.py
@@ -0,0 +1,72 @@
+# Copyright: (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
+# Copyright: (c) 2018, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from collections.abc import MutableMapping
+
+from ansible import constants as C
+from ansible.errors import AnsibleError
+from ansible.plugins.loader import cache_loader
+from ansible.utils.display import Display
+
+
+display = Display()
+
+
+class FactCache(MutableMapping):
+
+ def __init__(self, *args, **kwargs):
+
+ self._plugin = cache_loader.get(C.CACHE_PLUGIN)
+ if not self._plugin:
+ raise AnsibleError('Unable to load the facts cache plugin (%s).' % (C.CACHE_PLUGIN))
+
+ super(FactCache, self).__init__(*args, **kwargs)
+
+ def __getitem__(self, key):
+ if not self._plugin.contains(key):
+ raise KeyError
+ return self._plugin.get(key)
+
+ def __setitem__(self, key, value):
+ self._plugin.set(key, value)
+
+ def __delitem__(self, key):
+ self._plugin.delete(key)
+
+ def __contains__(self, key):
+ return self._plugin.contains(key)
+
+ def __iter__(self):
+ return iter(self._plugin.keys())
+
+ def __len__(self):
+ return len(self._plugin.keys())
+
+ def copy(self):
+ """ Return a primitive copy of the keys and values from the cache. """
+ return dict(self)
+
+ def keys(self):
+ return self._plugin.keys()
+
+ def flush(self):
+ """ Flush the fact cache of all keys. """
+ self._plugin.flush()
+
+ def first_order_merge(self, key, value):
+ host_facts = {key: value}
+
+ try:
+ host_cache = self._plugin.get(key)
+ if host_cache:
+ host_cache.update(value)
+ host_facts[key] = host_cache
+ except KeyError:
+ pass
+
+ super(FactCache, self).update(host_facts)
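
For illustration, a short sketch (not part of the upstream patch) of the FactCache mapping interface; it simply delegates to whichever cache plugin C.CACHE_PLUGIN selects ('memory' by default), so the host name and facts below are hypothetical:

from ansible.vars.fact_cache import FactCache

cache = FactCache()                                    # raises AnsibleError if the cache plugin cannot load
cache['web01'] = {'ansible_os_family': 'Debian'}       # __setitem__ -> plugin.set()
'web01' in cache                                       # __contains__ -> plugin.contains()
cache.first_order_merge('web01', {'ansible_distribution': 'Debian'})   # one-level merge into that host's dict
snapshot = cache.copy()                                # plain dict copy of all keys and values
cache.flush()                                          # clears every cached host
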
diff --git a/lib/ansible/vars/hostvars.py b/lib/ansible/vars/hostvars.py
new file mode 100644
index 0000000..e6679ef
--- /dev/null
+++ b/lib/ansible/vars/hostvars.py
@@ -0,0 +1,155 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from collections.abc import Mapping
+
+from ansible.template import Templar, AnsibleUndefined
+
+STATIC_VARS = [
+ 'ansible_version',
+ 'ansible_play_hosts',
+ 'ansible_dependent_role_names',
+ 'ansible_play_role_names',
+ 'ansible_role_names',
+ 'inventory_hostname',
+ 'inventory_hostname_short',
+ 'inventory_file',
+ 'inventory_dir',
+ 'groups',
+ 'group_names',
+ 'omit',
+ 'playbook_dir',
+ 'play_hosts',
+ 'role_names',
+ 'ungrouped',
+]
+
+__all__ = ['HostVars', 'HostVarsVars']
+
+
+# Note -- this is a Mapping, not a MutableMapping
+class HostVars(Mapping):
+ ''' A special view of vars_cache that adds values from the inventory when needed. '''
+
+ def __init__(self, inventory, variable_manager, loader):
+ self._inventory = inventory
+ self._loader = loader
+ self._variable_manager = variable_manager
+ variable_manager._hostvars = self
+
+ def set_variable_manager(self, variable_manager):
+ self._variable_manager = variable_manager
+ variable_manager._hostvars = self
+
+ def set_inventory(self, inventory):
+ self._inventory = inventory
+
+ def _find_host(self, host_name):
+ # does not use inventory.hosts so it can create localhost on demand
+ return self._inventory.get_host(host_name)
+
+ def raw_get(self, host_name):
+ '''
+ Similar to __getitem__, however the returned data is not run through
+ the templating engine to expand variables in the hostvars.
+ '''
+ host = self._find_host(host_name)
+ if host is None:
+ return AnsibleUndefined(name="hostvars['%s']" % host_name)
+
+ return self._variable_manager.get_vars(host=host, include_hostvars=False)
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+
+ # Methods __getstate__ and __setstate__ of VariableManager do not
+ # preserve _loader and _hostvars attributes to improve pickle
+ # performance and memory utilization. Since HostVars holds values
+ # of those attributes already, assign them if needed.
+ if self._variable_manager._loader is None:
+ self._variable_manager._loader = self._loader
+
+ if self._variable_manager._hostvars is None:
+ self._variable_manager._hostvars = self
+
+ def __getitem__(self, host_name):
+ data = self.raw_get(host_name)
+ if isinstance(data, AnsibleUndefined):
+ return data
+ return HostVarsVars(data, loader=self._loader)
+
+ def set_host_variable(self, host, varname, value):
+ self._variable_manager.set_host_variable(host, varname, value)
+
+ def set_nonpersistent_facts(self, host, facts):
+ self._variable_manager.set_nonpersistent_facts(host, facts)
+
+ def set_host_facts(self, host, facts):
+ self._variable_manager.set_host_facts(host, facts)
+
+ def __contains__(self, host_name):
+ # does not use inventory.hosts so it can create localhost on demand
+ return self._find_host(host_name) is not None
+
+ def __iter__(self):
+ for host in self._inventory.hosts:
+ yield host
+
+ def __len__(self):
+ return len(self._inventory.hosts)
+
+ def __repr__(self):
+ out = {}
+ for host in self._inventory.hosts:
+ out[host] = self.get(host)
+ return repr(out)
+
+ def __deepcopy__(self, memo):
+ # We do not need to deepcopy because HostVars is immutable,
+ # however we have to implement the method so we can deepcopy
+ # variables' dicts that contain HostVars.
+ return self
+
+
+class HostVarsVars(Mapping):
+
+ def __init__(self, variables, loader):
+ self._vars = variables
+ self._loader = loader
+
+ def __getitem__(self, var):
+ templar = Templar(variables=self._vars, loader=self._loader)
+ foo = templar.template(self._vars[var], fail_on_undefined=False, static_vars=STATIC_VARS)
+ return foo
+
+ def __contains__(self, var):
+ return (var in self._vars)
+
+ def __iter__(self):
+ for var in self._vars.keys():
+ yield var
+
+ def __len__(self):
+ return len(self._vars.keys())
+
+ def __repr__(self):
+ templar = Templar(variables=self._vars, loader=self._loader)
+ return repr(templar.template(self._vars, fail_on_undefined=False, static_vars=STATIC_VARS))
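
For illustration, a sketch (not part of the upstream patch) of how the engine wires HostVars up and why lookups are lazy; the inventory source and host names are hypothetical, and in real runs this plumbing is done by the playbook executor:

from ansible.parsing.dataloader import DataLoader
from ansible.inventory.manager import InventoryManager
from ansible.vars.manager import VariableManager
from ansible.vars.hostvars import HostVars

loader = DataLoader()
inventory = InventoryManager(loader=loader, sources=['hosts.ini'])    # hypothetical inventory source
variable_manager = VariableManager(loader=loader, inventory=inventory)

hostvars = HostVars(inventory=inventory, variable_manager=variable_manager, loader=loader)

# __getitem__ asks the VariableManager for that host's vars on demand and wraps the
# result in HostVarsVars, which templates each value only when it is actually read:
web01_vars = hostvars['web01']                 # HostVarsVars if 'web01' exists in hosts.ini
missing = hostvars['no-such-host']             # AnsibleUndefined is returned, not a KeyError
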
diff --git a/lib/ansible/vars/manager.py b/lib/ansible/vars/manager.py
new file mode 100644
index 0000000..a09704e
--- /dev/null
+++ b/lib/ansible/vars/manager.py
@@ -0,0 +1,749 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import sys
+
+from collections import defaultdict
+from collections.abc import Mapping, MutableMapping, Sequence
+from hashlib import sha1
+
+from jinja2.exceptions import UndefinedError
+
+from ansible import constants as C
+from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleFileNotFound, AnsibleAssertionError, AnsibleTemplateError
+from ansible.inventory.host import Host
+from ansible.inventory.helpers import sort_groups, get_group_vars
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six import text_type, string_types
+from ansible.plugins.loader import lookup_loader
+from ansible.vars.fact_cache import FactCache
+from ansible.template import Templar
+from ansible.utils.display import Display
+from ansible.utils.listify import listify_lookup_plugin_terms
+from ansible.utils.vars import combine_vars, load_extra_vars, load_options_vars
+from ansible.utils.unsafe_proxy import wrap_var
+from ansible.vars.clean import namespace_facts, clean_facts
+from ansible.vars.plugins import get_vars_from_inventory_sources, get_vars_from_path
+
+display = Display()
+
+
+def preprocess_vars(a):
+ '''
+ Ensures that vars contained in the parameter passed in are
+ returned as a list of dictionaries, to ensure for instance
+ that vars loaded from a file conform to an expected state.
+ '''
+
+ if a is None:
+ return None
+ elif not isinstance(a, list):
+ data = [a]
+ else:
+ data = a
+
+ for item in data:
+ if not isinstance(item, MutableMapping):
+ raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))
+
+ return data
+
+
+class VariableManager:
+
+ _ALLOWED = frozenset(['plugins_by_group', 'groups_plugins_play', 'groups_plugins_inventory', 'groups_inventory',
+ 'all_plugins_play', 'all_plugins_inventory', 'all_inventory'])
+
+ def __init__(self, loader=None, inventory=None, version_info=None):
+ self._nonpersistent_fact_cache = defaultdict(dict)
+ self._vars_cache = defaultdict(dict)
+ self._extra_vars = defaultdict(dict)
+ self._host_vars_files = defaultdict(dict)
+ self._group_vars_files = defaultdict(dict)
+ self._inventory = inventory
+ self._loader = loader
+ self._hostvars = None
+ self._omit_token = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()
+
+ self._options_vars = load_options_vars(version_info)
+
+ # If the basedir is specified as the empty string then it results in cwd being used.
+ # This is not a safe location to load vars from.
+ basedir = self._options_vars.get('basedir', False)
+ self.safe_basedir = bool(basedir is False or basedir)
+
+ # load extra vars
+ self._extra_vars = load_extra_vars(loader=self._loader)
+
+ # load fact cache
+ try:
+ self._fact_cache = FactCache()
+ except AnsibleError as e:
+ # bad cache plugin is not fatal error
+ # fallback to a dict as in memory cache
+ display.warning(to_text(e))
+ self._fact_cache = {}
+
+ def __getstate__(self):
+ data = dict(
+ fact_cache=self._fact_cache,
+ np_fact_cache=self._nonpersistent_fact_cache,
+ vars_cache=self._vars_cache,
+ extra_vars=self._extra_vars,
+ host_vars_files=self._host_vars_files,
+ group_vars_files=self._group_vars_files,
+ omit_token=self._omit_token,
+ options_vars=self._options_vars,
+ inventory=self._inventory,
+ safe_basedir=self.safe_basedir,
+ )
+ return data
+
+ def __setstate__(self, data):
+ self._fact_cache = data.get('fact_cache', defaultdict(dict))
+ self._nonpersistent_fact_cache = data.get('np_fact_cache', defaultdict(dict))
+ self._vars_cache = data.get('vars_cache', defaultdict(dict))
+ self._extra_vars = data.get('extra_vars', dict())
+ self._host_vars_files = data.get('host_vars_files', defaultdict(dict))
+ self._group_vars_files = data.get('group_vars_files', defaultdict(dict))
+ self._omit_token = data.get('omit_token', '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest())
+ self._inventory = data.get('inventory', None)
+ self._options_vars = data.get('options_vars', dict())
+ self.safe_basedir = data.get('safe_basedir', False)
+ self._loader = None
+ self._hostvars = None
+
+ @property
+ def extra_vars(self):
+ return self._extra_vars
+
+ def set_inventory(self, inventory):
+ self._inventory = inventory
+
+ def get_vars(self, play=None, host=None, task=None, include_hostvars=True, include_delegate_to=True, use_cache=True,
+ _hosts=None, _hosts_all=None, stage='task'):
+ '''
+ Returns the variables, with optional "context" given via the parameters
+ for the play, host, and task (which could possibly result in different
+ sets of variables being returned due to the additional context).
+
+ The order of precedence is:
+ - play->roles->get_default_vars (if there is a play context)
+ - group_vars_files[host] (if there is a host context)
+ - host_vars_files[host] (if there is a host context)
+ - host->get_vars (if there is a host context)
+ - fact_cache[host] (if there is a host context)
+ - play vars (if there is a play context)
+ - play vars_files (if there's no host context, ignore
+ file names that cannot be templated)
+ - task->get_vars (if there is a task context)
+ - vars_cache[host] (if there is a host context)
+ - extra vars
+
+ ``_hosts`` and ``_hosts_all`` should be considered private args, with only internal trusted callers relying
+ on the functionality they provide. These arguments may be removed at a later date without a deprecation
+ period and without warning.
+ '''
+
+ display.debug("in VariableManager get_vars()")
+
+ all_vars = dict()
+ magic_variables = self._get_magic_variables(
+ play=play,
+ host=host,
+ task=task,
+ include_hostvars=include_hostvars,
+ include_delegate_to=include_delegate_to,
+ _hosts=_hosts,
+ _hosts_all=_hosts_all,
+ )
+
+ _vars_sources = {}
+
+ def _combine_and_track(data, new_data, source):
+ '''
+ Wrapper function to update var sources dict and call combine_vars()
+
+ See notes in the VarsWithSources docstring for caveats and limitations of the source tracking
+ '''
+ if C.DEFAULT_DEBUG:
+ # Populate var sources dict
+ for key in new_data:
+ _vars_sources[key] = source
+ return combine_vars(data, new_data)
+
+ # default for all cases
+ basedirs = []
+ if self.safe_basedir: # avoid adhoc/console loading cwd
+ basedirs = [self._loader.get_basedir()]
+
+ if play:
+ # first we compile any vars specified in defaults/main.yml
+ # for all roles within the specified play
+ for role in play.get_roles():
+ all_vars = _combine_and_track(all_vars, role.get_default_vars(), "role '%s' defaults" % role.name)
+
+ if task:
+ # set basedirs
+ if C.PLAYBOOK_VARS_ROOT == 'all': # should be default
+ basedirs = task.get_search_path()
+ elif C.PLAYBOOK_VARS_ROOT in ('bottom', 'playbook_dir'): # only option in 2.4.0
+ basedirs = [task.get_search_path()[0]]
+ elif C.PLAYBOOK_VARS_ROOT != 'top':
+ # preserves default basedirs, only option pre 2.3
+ raise AnsibleError('Unknown playbook vars logic: %s' % C.PLAYBOOK_VARS_ROOT)
+
+ # if we have a task in this context, and that task has a role, make
+ # sure it sees its defaults above any other roles, as we previously
+ # (v1) made sure each task had a copy of its roles default vars
+ if task._role is not None and (play or task.action in C._ACTION_INCLUDE_ROLE):
+ all_vars = _combine_and_track(all_vars, task._role.get_default_vars(dep_chain=task.get_dep_chain()),
+ "role '%s' defaults" % task._role.name)
+
+ if host:
+ # THE 'all' group and the rest of groups for a host, used below
+ all_group = self._inventory.groups.get('all')
+ host_groups = sort_groups([g for g in host.get_groups() if g.name not in ['all']])
+
+ def _get_plugin_vars(plugin, path, entities):
+ data = {}
+ try:
+ data = plugin.get_vars(self._loader, path, entities)
+ except AttributeError:
+ try:
+ for entity in entities:
+ if isinstance(entity, Host):
+ data |= plugin.get_host_vars(entity.name)
+ else:
+ data |= plugin.get_group_vars(entity.name)
+ except AttributeError:
+ if hasattr(plugin, 'run'):
+ raise AnsibleError("Cannot use v1 type vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
+ else:
+ raise AnsibleError("Invalid vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
+ return data
+
+ # internal functions that actually do the work
+ def _plugins_inventory(entities):
+ ''' merges all entities by inventory source '''
+ return get_vars_from_inventory_sources(self._loader, self._inventory._sources, entities, stage)
+
+ def _plugins_play(entities):
+ ''' merges all entities adjacent to play '''
+ data = {}
+ for path in basedirs:
+ data = _combine_and_track(data, get_vars_from_path(self._loader, path, entities, stage), "path '%s'" % path)
+ return data
+
+ # configurable functions that are sortable via config, remember to add to _ALLOWED if expanding this list
+ def all_inventory():
+ return all_group.get_vars()
+
+ def all_plugins_inventory():
+ return _plugins_inventory([all_group])
+
+ def all_plugins_play():
+ return _plugins_play([all_group])
+
+ def groups_inventory():
+ ''' gets group vars from inventory '''
+ return get_group_vars(host_groups)
+
+ def groups_plugins_inventory():
+ ''' gets plugin sources from inventory for groups '''
+ return _plugins_inventory(host_groups)
+
+ def groups_plugins_play():
+ ''' gets plugin sources from play for groups '''
+ return _plugins_play(host_groups)
+
+ def plugins_by_groups():
+ '''
+ merges all plugin sources by group,
+ This should be used instead, NOT in combination with the other groups_plugins* functions
+ '''
+ data = {}
+ for group in host_groups:
+ data[group] = _combine_and_track(data[group], _plugins_inventory(group), "inventory group_vars for '%s'" % group)
+ data[group] = _combine_and_track(data[group], _plugins_play(group), "playbook group_vars for '%s'" % group)
+ return data
+
+ # Merge groups as per precedence config
+ # only allow to call the functions we want exposed
+ for entry in C.VARIABLE_PRECEDENCE:
+ if entry in self._ALLOWED:
+ display.debug('Calling %s to load vars for %s' % (entry, host.name))
+ all_vars = _combine_and_track(all_vars, locals()[entry](), "group vars, precedence entry '%s'" % entry)
+ else:
+ display.warning('Ignoring unknown variable precedence entry: %s' % (entry))
+
+ # host vars, from inventory, inventory adjacent and play adjacent via plugins
+ all_vars = _combine_and_track(all_vars, host.get_vars(), "host vars for '%s'" % host)
+ all_vars = _combine_and_track(all_vars, _plugins_inventory([host]), "inventory host_vars for '%s'" % host)
+ all_vars = _combine_and_track(all_vars, _plugins_play([host]), "playbook host_vars for '%s'" % host)
+
+ # finally, the facts caches for this host, if it exists
+ # TODO: cleaning of facts should eventually become part of taskresults instead of vars
+ try:
+ facts = wrap_var(self._fact_cache.get(host.name, {}))
+ all_vars |= namespace_facts(facts)
+
+ # push facts to main namespace
+ if C.INJECT_FACTS_AS_VARS:
+ all_vars = _combine_and_track(all_vars, wrap_var(clean_facts(facts)), "facts")
+ else:
+ # always 'promote' ansible_local
+ all_vars = _combine_and_track(all_vars, wrap_var({'ansible_local': facts.get('ansible_local', {})}), "facts")
+ except KeyError:
+ pass
+
+ if play:
+ all_vars = _combine_and_track(all_vars, play.get_vars(), "play vars")
+
+ vars_files = play.get_vars_files()
+ try:
+ for vars_file_item in vars_files:
+ # create a set of temporary vars here, which incorporate the extra
+ # and magic vars so we can properly template the vars_files entries
+ # NOTE: this makes them depend on host vars/facts so things like
+ # ansible_facts['os_distribution'] can be used, ala include_vars.
+ # Consider DEPRECATING this in the future, since we have include_vars ...
+ temp_vars = combine_vars(all_vars, self._extra_vars)
+ temp_vars = combine_vars(temp_vars, magic_variables)
+ templar = Templar(loader=self._loader, variables=temp_vars)
+
+ # we assume each item in the list is itself a list, as we
+ # support "conditional includes" for vars_files, which mimics
+ # the with_first_found mechanism.
+ vars_file_list = vars_file_item
+ if not isinstance(vars_file_list, list):
+ vars_file_list = [vars_file_list]
+
+ # now we iterate through the (potential) files, and break out
+ # as soon as we read one from the list. If none are found, we
+ # raise an error, which is silently ignored at this point.
+ try:
+ for vars_file in vars_file_list:
+ vars_file = templar.template(vars_file)
+ if not (isinstance(vars_file, Sequence)):
+ raise AnsibleError(
+ "Invalid vars_files entry found: %r\n"
+ "vars_files entries should be either a string type or "
+ "a list of string types after template expansion" % vars_file
+ )
+ try:
+ play_search_stack = play.get_search_path()
+ found_file = real_file = self._loader.path_dwim_relative_stack(play_search_stack, 'vars', vars_file)
+ data = preprocess_vars(self._loader.load_from_file(found_file, unsafe=True, cache=False))
+ if data is not None:
+ for item in data:
+ all_vars = _combine_and_track(all_vars, item, "play vars_files from '%s'" % vars_file)
+ break
+ except AnsibleFileNotFound:
+ # we continue on loader failures
+ continue
+ except AnsibleParserError:
+ raise
+ else:
+ # if include_delegate_to is set to False or we don't have a host, we ignore the missing
+ # vars file here because we're working on a delegated host or require host vars, see NOTE above
+ if include_delegate_to and host:
+ raise AnsibleFileNotFound("vars file %s was not found" % vars_file_item)
+ except (UndefinedError, AnsibleUndefinedVariable):
+ if host is not None and self._fact_cache.get(host.name, dict()).get('module_setup') and task is not None:
+ raise AnsibleUndefinedVariable("an undefined variable was found when attempting to template the vars_files item '%s'"
+ % vars_file_item, obj=vars_file_item)
+ else:
+ # we do not have a full context here, and the missing variable could be because of that
+ # so just show a warning and continue
+ display.vvv("skipping vars_file '%s' due to an undefined variable" % vars_file_item)
+ continue
+
+ display.vvv("Read vars_file '%s'" % vars_file_item)
+ except TypeError:
+ raise AnsibleParserError("Error while reading vars files - please supply a list of file names. "
+ "Got '%s' of type %s" % (vars_files, type(vars_files)))
+
+ # By default, we now merge in all vars from all roles in the play,
+ # unless the user has disabled this via a config option
+ if not C.DEFAULT_PRIVATE_ROLE_VARS:
+ for role in play.get_roles():
+ all_vars = _combine_and_track(all_vars, role.get_vars(include_params=False), "role '%s' vars" % role.name)
+
+ # next, we merge in the vars from the role, which will specifically
+ # follow the role dependency chain, and then we merge in the tasks
+ # vars (which will look at parent blocks/task includes)
+ if task:
+ if task._role:
+ all_vars = _combine_and_track(all_vars, task._role.get_vars(task.get_dep_chain(), include_params=False),
+ "role '%s' vars" % task._role.name)
+ all_vars = _combine_and_track(all_vars, task.get_vars(), "task vars")
+
+ # next, we merge in the vars cache (include vars) and nonpersistent
+ # facts cache (set_fact/register), in that order
+ if host:
+ # include_vars non-persistent cache
+ all_vars = _combine_and_track(all_vars, self._vars_cache.get(host.get_name(), dict()), "include_vars")
+ # fact non-persistent cache
+ all_vars = _combine_and_track(all_vars, self._nonpersistent_fact_cache.get(host.name, dict()), "set_fact")
+
+ # next, we merge in role params and task include params
+ if task:
+ if task._role:
+ all_vars = _combine_and_track(all_vars, task._role.get_role_params(task.get_dep_chain()), "role '%s' params" % task._role.name)
+
+ # special case for include tasks, where the include params
+ # may be specified in the vars field for the task, which should
+ # have higher precedence than the vars/np facts above
+ all_vars = _combine_and_track(all_vars, task.get_include_params(), "include params")
+
+ # extra vars
+ all_vars = _combine_and_track(all_vars, self._extra_vars, "extra vars")
+
+ # magic variables
+ all_vars = _combine_and_track(all_vars, magic_variables, "magic vars")
+
+ # special case for the 'environment' magic variable, as someone
+ # may have set it as a variable and we don't want to stomp on it
+ if task:
+ all_vars['environment'] = task.environment
+
+ # 'vars' magic var
+ if task or play:
+ # has to be copy, otherwise recursive ref
+ all_vars['vars'] = all_vars.copy()
+
+ # if we have a host and task and we're delegating to another host,
+ # figure out the variables for that host now so we don't have to rely on host vars later
+ if task and host and task.delegate_to is not None and include_delegate_to:
+ all_vars['ansible_delegated_vars'], all_vars['_ansible_loop_cache'] = self._get_delegated_vars(play, task, all_vars)
+
+ display.debug("done with get_vars()")
+ if C.DEFAULT_DEBUG:
+ # Use VarsWithSources wrapper class to display var sources
+ return VarsWithSources.new_vars_with_sources(all_vars, _vars_sources)
+ else:
+ return all_vars
+
+ def _get_magic_variables(self, play, host, task, include_hostvars, include_delegate_to, _hosts=None, _hosts_all=None):
+ '''
+ Returns a dictionary of so-called "magic" variables in Ansible,
+ which are special variables we set internally for use.
+ '''
+
+ variables = {}
+ variables['playbook_dir'] = os.path.abspath(self._loader.get_basedir())
+ variables['ansible_playbook_python'] = sys.executable
+ variables['ansible_config_file'] = C.CONFIG_FILE
+
+ if play:
+ # This is a list of all role names of all dependencies for all roles for this play
+ dependency_role_names = list({d.get_name() for r in play.roles for d in r.get_all_dependencies()})
+ # This is a list of all role names of all roles for this play
+ play_role_names = [r.get_name() for r in play.roles]
+
+ # ansible_role_names includes all role names, dependent or directly referenced by the play
+ variables['ansible_role_names'] = list(set(dependency_role_names + play_role_names))
+ # ansible_play_role_names includes the names of all roles directly referenced by this play
+ # roles that are implicitly referenced via dependencies are not listed.
+ variables['ansible_play_role_names'] = play_role_names
+ # ansible_dependent_role_names includes the names of all roles that are referenced via dependencies
+ # dependencies that are also explicitly named as roles are included in this list
+ variables['ansible_dependent_role_names'] = dependency_role_names
+
+ # DEPRECATED: role_names should be deprecated in favor of ansible_role_names or ansible_play_role_names
+ variables['role_names'] = variables['ansible_play_role_names']
+
+ variables['ansible_play_name'] = play.get_name()
+
+ if task:
+ if task._role:
+ variables['role_name'] = task._role.get_name(include_role_fqcn=False)
+ variables['role_path'] = task._role._role_path
+ variables['role_uuid'] = text_type(task._role._uuid)
+ variables['ansible_collection_name'] = task._role._role_collection
+ variables['ansible_role_name'] = task._role.get_name()
+
+ if self._inventory is not None:
+ variables['groups'] = self._inventory.get_groups_dict()
+ if play:
+ templar = Templar(loader=self._loader)
+ if not play.finalized and templar.is_template(play.hosts):
+ pattern = 'all'
+ else:
+ pattern = play.hosts or 'all'
+ # add the list of hosts in the play, as adjusted for limit/filters
+ if not _hosts_all:
+ _hosts_all = [h.name for h in self._inventory.get_hosts(pattern=pattern, ignore_restrictions=True)]
+ if not _hosts:
+ _hosts = [h.name for h in self._inventory.get_hosts()]
+
+ variables['ansible_play_hosts_all'] = _hosts_all[:]
+ variables['ansible_play_hosts'] = [x for x in variables['ansible_play_hosts_all'] if x not in play._removed_hosts]
+ variables['ansible_play_batch'] = [x for x in _hosts if x not in play._removed_hosts]
+
+ # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_batch,
+ # however this would take work in the templating engine, so for now we'll add both
+ variables['play_hosts'] = variables['ansible_play_batch']
+
+ # the 'omit' value allows params to be left out if the variable they are based on is undefined
+ variables['omit'] = self._omit_token
+ # Set options vars
+ for option, option_value in self._options_vars.items():
+ variables[option] = option_value
+
+ if self._hostvars is not None and include_hostvars:
+ variables['hostvars'] = self._hostvars
+
+ return variables
+
+ def _get_delegated_vars(self, play, task, existing_variables):
+ # This method has a lot of code copied from ``TaskExecutor._get_loop_items``
+ # if this is failing, and ``TaskExecutor._get_loop_items`` is not
+ # then more will have to be copied here.
+ # TODO: dedupe code here and with ``TaskExecutor._get_loop_items``
+ # this may be possible once we move pre-processing pre fork
+
+ if not hasattr(task, 'loop'):
+ # This "task" is not a Task, so we need to skip it
+ return {}, None
+
+ # we unfortunately need to template the delegate_to field here,
+ # as we're fetching vars before post_validate has been called on
+ # the task that has been passed in
+ vars_copy = existing_variables.copy()
+
+ # get search path for this task to pass to lookup plugins
+ vars_copy['ansible_search_path'] = task.get_search_path()
+
+ # ensure basedir is always in (dwim already searches here but we need to display it)
+ if self._loader.get_basedir() not in vars_copy['ansible_search_path']:
+ vars_copy['ansible_search_path'].append(self._loader.get_basedir())
+
+ templar = Templar(loader=self._loader, variables=vars_copy)
+
+ items = []
+ has_loop = True
+ if task.loop_with is not None:
+ if task.loop_with in lookup_loader:
+ fail = True
+ if task.loop_with == 'first_found':
+ # first_found loops are special. If the item is undefined then we want to fall through to the next
+ fail = False
+ try:
+ loop_terms = listify_lookup_plugin_terms(terms=task.loop, templar=templar, fail_on_undefined=fail, convert_bare=False)
+
+ if not fail:
+ loop_terms = [t for t in loop_terms if not templar.is_template(t)]
+
+ mylookup = lookup_loader.get(task.loop_with, loader=self._loader, templar=templar)
+
+ # give lookup task 'context' for subdir (mostly needed for first_found)
+ for subdir in ['template', 'var', 'file']: # TODO: move this to constants?
+ if subdir in task.action:
+ break
+ setattr(mylookup, '_subdir', subdir + 's')
+
+ items = wrap_var(mylookup.run(terms=loop_terms, variables=vars_copy))
+
+ except AnsibleTemplateError:
+ # This task will be skipped later due to this, so we just setup
+ # a dummy array for the later code so it doesn't fail
+ items = [None]
+ else:
+ raise AnsibleError("Failed to find the lookup named '%s' in the available lookup plugins" % task.loop_with)
+ elif task.loop is not None:
+ try:
+ items = templar.template(task.loop)
+ except AnsibleTemplateError:
+ # This task will be skipped later due to this, so we just setup
+ # a dummy array for the later code so it doesn't fail
+ items = [None]
+ else:
+ has_loop = False
+ items = [None]
+
+ # since host can change per loop, we keep dict per host name resolved
+ delegated_host_vars = dict()
+ item_var = getattr(task.loop_control, 'loop_var', 'item')
+ cache_items = False
+ for item in items:
+ # update the variables with the item value for templating, in case we need it
+ if item is not None:
+ vars_copy[item_var] = item
+
+ templar.available_variables = vars_copy
+ delegated_host_name = templar.template(task.delegate_to, fail_on_undefined=False)
+ if delegated_host_name != task.delegate_to:
+ cache_items = True
+ if delegated_host_name is None:
+ raise AnsibleError(message="Undefined delegate_to host for task:", obj=task._ds)
+ if not isinstance(delegated_host_name, string_types):
+ raise AnsibleError(message="the field 'delegate_to' has an invalid type (%s), and could not be"
+ " converted to a string type." % type(delegated_host_name), obj=task._ds)
+
+ if delegated_host_name in delegated_host_vars:
+ # no need to repeat ourselves, as the delegate_to value
+ # does not appear to be tied to the loop item variable
+ continue
+
+ # now try to find the delegated-to host in inventory, or failing that,
+ # create a new host on the fly so we can fetch variables for it
+ delegated_host = None
+ if self._inventory is not None:
+ delegated_host = self._inventory.get_host(delegated_host_name)
+ # try looking it up based on the address field, and finally
+ # fall back to creating a host on the fly to use for the var lookup
+ if delegated_host is None:
+ for h in self._inventory.get_hosts(ignore_limits=True, ignore_restrictions=True):
+ # check if the address matches, or if both the delegated_to host
+ # and the current host are in the list of localhost aliases
+ if h.address == delegated_host_name:
+ delegated_host = h
+ break
+ else:
+ delegated_host = Host(name=delegated_host_name)
+ else:
+ delegated_host = Host(name=delegated_host_name)
+
+ # now we go fetch the vars for the delegated-to host and save them in our
+ # master dictionary of variables to be used later in the TaskExecutor/PlayContext
+ delegated_host_vars[delegated_host_name] = self.get_vars(
+ play=play,
+ host=delegated_host,
+ task=task,
+ include_delegate_to=False,
+ include_hostvars=True,
+ )
+ delegated_host_vars[delegated_host_name]['inventory_hostname'] = vars_copy.get('inventory_hostname')
+
+ _ansible_loop_cache = None
+ if has_loop and cache_items:
+ # delegate_to templating produced a change, so we will cache the templated items
+ # in a special private hostvar
+ # this ensures that delegate_to+loop doesn't produce different results than TaskExecutor
+ # which may reprocess the loop
+ _ansible_loop_cache = items
+
+ return delegated_host_vars, _ansible_loop_cache
+
+ def clear_facts(self, hostname):
+ '''
+ Clears the facts for a host
+ '''
+ self._fact_cache.pop(hostname, None)
+
+ def set_host_facts(self, host, facts):
+ '''
+ Sets or updates the given facts for a host in the fact cache.
+ '''
+
+ if not isinstance(facts, Mapping):
+ raise AnsibleAssertionError("the type of 'facts' to set for host_facts should be a Mapping but is a %s" % type(facts))
+
+ try:
+ host_cache = self._fact_cache[host]
+ except KeyError:
+ # We get to set this as new
+ host_cache = facts
+ else:
+ if not isinstance(host_cache, MutableMapping):
+ raise TypeError('The object retrieved for {0} must be a MutableMapping but was'
+ ' a {1}'.format(host, type(host_cache)))
+ # Update the existing facts
+ host_cache |= facts
+
+ # Save the facts back to the backing store
+ self._fact_cache[host] = host_cache
+
+ def set_nonpersistent_facts(self, host, facts):
+ '''
+ Sets or updates the given facts for a host in the fact cache.
+ '''
+
+ if not isinstance(facts, Mapping):
+ raise AnsibleAssertionError("the type of 'facts' to set for nonpersistent_facts should be a Mapping but is a %s" % type(facts))
+
+ try:
+ self._nonpersistent_fact_cache[host] |= facts
+ except KeyError:
+ self._nonpersistent_fact_cache[host] = facts
+
+ def set_host_variable(self, host, varname, value):
+ '''
+ Sets a value in the vars_cache for a host.
+ '''
+ if host not in self._vars_cache:
+ self._vars_cache[host] = dict()
+ if varname in self._vars_cache[host] and isinstance(self._vars_cache[host][varname], MutableMapping) and isinstance(value, MutableMapping):
+ self._vars_cache[host] = combine_vars(self._vars_cache[host], {varname: value})
+ else:
+ self._vars_cache[host][varname] = value
+
+
+class VarsWithSources(MutableMapping):
+ '''
+ Dict-like class for vars that also provides source information for each var
+
+ This class can only store the source for top-level vars. It does no tracking
+ on its own, just shows a debug message with the information that it is provided
+ when a particular var is accessed.
+ '''
+ def __init__(self, *args, **kwargs):
+ ''' Dict-compatible constructor '''
+ self.data = dict(*args, **kwargs)
+ self.sources = {}
+
+ @classmethod
+ def new_vars_with_sources(cls, data, sources):
+ ''' Alternate constructor method to instantiate class with sources '''
+ v = cls(data)
+ v.sources = sources
+ return v
+
+ def get_source(self, key):
+ return self.sources.get(key, None)
+
+ def __getitem__(self, key):
+ val = self.data[key]
+ # See notes in the VarsWithSources docstring for caveats and limitations of the source tracking
+ display.debug("variable '%s' from source: %s" % (key, self.sources.get(key, "unknown")))
+ return val
+
+ def __setitem__(self, key, value):
+ self.data[key] = value
+
+ def __delitem__(self, key):
+ del self.data[key]
+
+ def __iter__(self):
+ return iter(self.data)
+
+ def __len__(self):
+ return len(self.data)
+
+ # Prevent duplicate debug messages by defining our own __contains__ pointing at the underlying dict
+ def __contains__(self, key):
+ return self.data.__contains__(key)
+
+ def copy(self):
+ return VarsWithSources.new_vars_with_sources(self.data.copy(), self.sources.copy())
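
For illustration, a sketch (not part of the upstream patch) of the calls the engine makes into VariableManager, reusing the loader/inventory/variable_manager objects from the hostvars.py sketch above; the host name and values are hypothetical:

host = inventory.get_host('web01')                        # hypothetical host from the inventory

# host-only context: inventory vars, vars plugins, fact caches, options and extra vars
host_vars = variable_manager.get_vars(host=host)

# set_fact/register results go to the non-persistent cache, include_vars to the vars cache;
# both are keyed by host name and merged back in on the next get_vars() call
variable_manager.set_nonpersistent_facts(host.name, {'my_fact': 42})
variable_manager.set_host_variable(host.name, 'my_var', 'some value')

# with ANSIBLE_DEBUG=1 (C.DEFAULT_DEBUG) get_vars() returns a VarsWithSources wrapper,
# so the origin of any top-level variable can be inspected:
debug_vars = variable_manager.get_vars(host=host)
if hasattr(debug_vars, 'get_source'):
    print(debug_vars.get_source('my_var'))                # e.g. "include_vars"
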
diff --git a/lib/ansible/vars/plugins.py b/lib/ansible/vars/plugins.py
new file mode 100644
index 0000000..303052b
--- /dev/null
+++ b/lib/ansible/vars/plugins.py
@@ -0,0 +1,114 @@
+# Copyright (c) 2018 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+
+from ansible import constants as C
+from ansible.errors import AnsibleError
+from ansible.inventory.host import Host
+from ansible.module_utils._text import to_bytes
+from ansible.plugins.loader import vars_loader
+from ansible.utils.collection_loader import AnsibleCollectionRef
+from ansible.utils.display import Display
+from ansible.utils.vars import combine_vars
+
+display = Display()
+
+
+def get_plugin_vars(loader, plugin, path, entities):
+
+ data = {}
+ try:
+ data = plugin.get_vars(loader, path, entities)
+ except AttributeError:
+ try:
+ for entity in entities:
+ if isinstance(entity, Host):
+ data |= plugin.get_host_vars(entity.name)
+ else:
+ data |= plugin.get_group_vars(entity.name)
+ except AttributeError:
+ if hasattr(plugin, 'run'):
+ raise AnsibleError("Cannot use v1 type vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
+ else:
+ raise AnsibleError("Invalid vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
+ return data
+
+
+def get_vars_from_path(loader, path, entities, stage):
+
+ data = {}
+
+ vars_plugin_list = list(vars_loader.all())
+ for plugin_name in C.VARIABLE_PLUGINS_ENABLED:
+ if AnsibleCollectionRef.is_valid_fqcr(plugin_name):
+ vars_plugin = vars_loader.get(plugin_name)
+ if vars_plugin is None:
+ # Error if there's no play directory or the name is wrong?
+ continue
+ if vars_plugin not in vars_plugin_list:
+ vars_plugin_list.append(vars_plugin)
+
+ for plugin in vars_plugin_list:
+ # legacy plugins always run by default, but they can set REQUIRES_ENABLED=True to opt out.
+
+ builtin_or_legacy = plugin.ansible_name.startswith('ansible.builtin.') or '.' not in plugin.ansible_name
+
+ # builtin is supposed to have REQUIRES_ENABLED=True, the following is for legacy plugins...
+ needs_enabled = not builtin_or_legacy
+ if hasattr(plugin, 'REQUIRES_ENABLED'):
+ needs_enabled = plugin.REQUIRES_ENABLED
+ elif hasattr(plugin, 'REQUIRES_WHITELIST'):
+ display.deprecated("The VarsModule class variable 'REQUIRES_WHITELIST' is deprecated. "
+ "Use 'REQUIRES_ENABLED' instead.", version=2.18)
+ needs_enabled = plugin.REQUIRES_WHITELIST
+
+ # A collection plugin was enabled to get to this point because vars_loader.all() does not include collection plugins.
+ # Warn if a collection plugin has REQUIRES_ENABLED because it has no effect.
+ if not builtin_or_legacy and (hasattr(plugin, 'REQUIRES_ENABLED') or hasattr(plugin, 'REQUIRES_WHITELIST')):
+ display.warning(
+ "Vars plugins in collections must be enabled to be loaded, REQUIRES_ENABLED is not supported. "
+ "This should be removed from the plugin %s." % plugin.ansible_name
+ )
+ elif builtin_or_legacy and needs_enabled and not plugin.matches_name(C.VARIABLE_PLUGINS_ENABLED):
+ continue
+
+ has_stage = hasattr(plugin, 'get_option') and plugin.has_option('stage')
+
+ # if a plugin-specific setting has not been provided, use the global setting
+ # older/non shipped plugins that don't support the plugin-specific setting should also use the global setting
+ use_global = (has_stage and plugin.get_option('stage') is None) or not has_stage
+
+ if use_global:
+ if C.RUN_VARS_PLUGINS == 'demand' and stage == 'inventory':
+ continue
+ elif C.RUN_VARS_PLUGINS == 'start' and stage == 'task':
+ continue
+ elif has_stage and plugin.get_option('stage') not in ('all', stage):
+ continue
+
+ data = combine_vars(data, get_plugin_vars(loader, plugin, path, entities))
+
+ return data
+
+
+def get_vars_from_inventory_sources(loader, sources, entities, stage):
+
+ data = {}
+ for path in sources:
+
+ if path is None:
+ continue
+ if ',' in path and not os.path.exists(path): # skip host lists
+ continue
+ elif not os.path.isdir(to_bytes(path)):
+ # always pass the directory of the inventory source file
+ path = os.path.dirname(path)
+
+ data = combine_vars(data, get_vars_from_path(loader, path, entities, stage))
+
+ return data
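
For illustration, a sketch (not part of the upstream patch) of the plugin shape that get_plugin_vars() and get_vars_from_path() expect; the variable names and REQUIRES_ENABLED value are hypothetical, and a legacy plugin like this runs by default unless it opts out:

from ansible.inventory.host import Host
from ansible.plugins.vars import BaseVarsPlugin


class VarsModule(BaseVarsPlugin):

    REQUIRES_ENABLED = False     # checked above for legacy/builtin plugins; collection plugins must be enabled instead

    def get_vars(self, loader, path, entities):
        ''' called once per adjacent path with the hosts/groups currently in play '''
        super(VarsModule, self).get_vars(loader, path, entities)
        data = {}
        for entity in entities:
            if isinstance(entity, Host):
                data['example_host_marker_%s' % entity.name] = True    # hypothetical variable
        return data
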
diff --git a/lib/ansible/vars/reserved.py b/lib/ansible/vars/reserved.py
new file mode 100644
index 0000000..2d1b4d5
--- /dev/null
+++ b/lib/ansible/vars/reserved.py
@@ -0,0 +1,84 @@
+# (c) 2017 Ansible By Red Hat
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.playbook import Play
+from ansible.playbook.block import Block
+from ansible.playbook.role import Role
+from ansible.playbook.task import Task
+from ansible.utils.display import Display
+
+display = Display()
+
+
+def get_reserved_names(include_private=True):
+ ''' this function returns the list of reserved names associated with play objects'''
+
+ public = set()
+ private = set()
+ result = set()
+
+ # FIXME: find a way to 'not hardcode', possibly need role deps/includes
+ class_list = [Play, Role, Block, Task]
+
+ for aclass in class_list:
+ # build ordered list to loop over and dict with attributes
+ for name, attr in aclass.fattributes.items():
+ if attr.private:
+ private.add(name)
+ else:
+ public.add(name)
+
+ # local_action is implicit with action
+ if 'action' in public:
+ public.add('local_action')
+
+ # loop implies with_
+ # FIXME: remove after with_ is not only deprecated but removed
+ if 'loop' in private or 'loop' in public:
+ public.add('with_')
+
+ if include_private:
+ result = public.union(private)
+ else:
+ result = public
+
+ return result
+
+
+def warn_if_reserved(myvars, additional=None):
+ ''' this function warns if any variable passed conflicts with internally reserved names '''
+
+ if additional is None:
+ reserved = _RESERVED_NAMES
+ else:
+ reserved = _RESERVED_NAMES.union(additional)
+
+ varnames = set(myvars)
+ varnames.discard('vars') # we add this one internally, so safe to ignore
+ for varname in varnames.intersection(reserved):
+ display.warning('Found variable using reserved name: %s' % varname)
+
+
+def is_reserved_name(name):
+ return name in _RESERVED_NAMES
+
+
+_RESERVED_NAMES = frozenset(get_reserved_names())
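
For illustration, a short sketch (not part of the upstream patch) of the reserved-name helpers; 'tasks' and 'environment' are real playbook keywords, while 'my_var' stands in for an ordinary user variable:

from ansible.vars.reserved import get_reserved_names, is_reserved_name, warn_if_reserved

is_reserved_name('tasks')                              # True: 'tasks' is a Play attribute
is_reserved_name('my_var')                             # False for ordinary variable names
warn_if_reserved({'environment': {}, 'my_var': 1})     # warns only about 'environment'
sorted(get_reserved_names(include_private=False))      # public keyword names only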