# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import sys

from collections import defaultdict
from collections.abc import Mapping, MutableMapping, Sequence
from hashlib import sha1

from jinja2.exceptions import UndefinedError

from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleFileNotFound, AnsibleAssertionError, AnsibleTemplateError
from ansible.inventory.host import Host
from ansible.inventory.helpers import sort_groups, get_group_vars
from ansible.module_utils._text import to_text
from ansible.module_utils.six import text_type, string_types
from ansible.plugins.loader import lookup_loader
from ansible.vars.fact_cache import FactCache
from ansible.template import Templar
from ansible.utils.display import Display
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.vars import combine_vars, load_extra_vars, load_options_vars
from ansible.utils.unsafe_proxy import wrap_var
from ansible.vars.clean import namespace_facts, clean_facts
from ansible.vars.plugins import get_vars_from_inventory_sources, get_vars_from_path

display = Display()


def preprocess_vars(a):
    '''
    Ensures that vars contained in the parameter passed in are
    returned as a list of dictionaries, so that, for instance,
    vars loaded from a file conform to an expected structure.
    '''

    if a is None:
        return None
    elif not isinstance(a, list):
        data = [a]
    else:
        data = a

    for item in data:
        if not isinstance(item, MutableMapping):
            raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))

    return data
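
# Illustrative usage sketch; the literal values shown here are hypothetical:
#
#   preprocess_vars({'http_port': 80})      # -> [{'http_port': 80}]
#   preprocess_vars([{'a': 1}, {'b': 2}])   # -> [{'a': 1}, {'b': 2}]  (returned unchanged)
#   preprocess_vars(None)                   # -> None
#   preprocess_vars(['not a mapping'])      # -> raises AnsibleError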


class VariableManager:

    _ALLOWED = frozenset(['plugins_by_group', 'groups_plugins_play', 'groups_plugins_inventory', 'groups_inventory',
                          'all_plugins_play', 'all_plugins_inventory', 'all_inventory'])

    def __init__(self, loader=None, inventory=None, version_info=None):
        self._nonpersistent_fact_cache = defaultdict(dict)
        self._vars_cache = defaultdict(dict)
        self._extra_vars = defaultdict(dict)
        self._host_vars_files = defaultdict(dict)
        self._group_vars_files = defaultdict(dict)
        self._inventory = inventory
        self._loader = loader
        self._hostvars = None
        self._omit_token = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()

        self._options_vars = load_options_vars(version_info)

        # If the basedir is specified as the empty string then it results in cwd being used.
        # This is not a safe location to load vars from.
        basedir = self._options_vars.get('basedir', False)
        self.safe_basedir = bool(basedir is False or basedir)
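        # i.e. safe_basedir is False only when an empty basedir (cwd) was explicitly
        # passed in; a missing basedir or a non-empty one is considered safe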

        # load extra vars
        self._extra_vars = load_extra_vars(loader=self._loader)

        # load fact cache
        try:
            self._fact_cache = FactCache()
        except AnsibleError as e:
            # a bad cache plugin is not a fatal error
            # fall back to a dict as an in-memory cache
            display.warning(to_text(e))
            self._fact_cache = {}

    def __getstate__(self):
        data = dict(
            fact_cache=self._fact_cache,
            np_fact_cache=self._nonpersistent_fact_cache,
            vars_cache=self._vars_cache,
            extra_vars=self._extra_vars,
            host_vars_files=self._host_vars_files,
            group_vars_files=self._group_vars_files,
            omit_token=self._omit_token,
            options_vars=self._options_vars,
            inventory=self._inventory,
            safe_basedir=self.safe_basedir,
        )
        return data

    def __setstate__(self, data):
        self._fact_cache = data.get('fact_cache', defaultdict(dict))
        self._nonpersistent_fact_cache = data.get('np_fact_cache', defaultdict(dict))
        self._vars_cache = data.get('vars_cache', defaultdict(dict))
        self._extra_vars = data.get('extra_vars', dict())
        self._host_vars_files = data.get('host_vars_files', defaultdict(dict))
        self._group_vars_files = data.get('group_vars_files', defaultdict(dict))
        self._omit_token = data.get('omit_token', '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest())
        self._inventory = data.get('inventory', None)
        self._options_vars = data.get('options_vars', dict())
        self.safe_basedir = data.get('safe_basedir', False)
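        # the loader and hostvars are intentionally not serialized; the consumer is
        # expected to reattach them after unpickling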
        self._loader = None
        self._hostvars = None

    @property
    def extra_vars(self):
        return self._extra_vars

    def set_inventory(self, inventory):
        self._inventory = inventory

    def get_vars(self, play=None, host=None, task=None, include_hostvars=True, include_delegate_to=True, use_cache=True,
                 _hosts=None, _hosts_all=None, stage='task'):
        '''
        Returns the variables, with optional "context" given via the parameters
        for the play, host, and task (which could possibly result in different
        sets of variables being returned due to the additional context).

        The order of precedence is:
        - play->roles->get_default_vars (if there is a play context)
        - group_vars_files[host] (if there is a host context)
        - host_vars_files[host] (if there is a host context)
        - host->get_vars (if there is a host context)
        - fact_cache[host] (if there is a host context)
        - play vars (if there is a play context)
        - play vars_files (if there's no host context, ignore
          file names that cannot be templated)
        - task->get_vars (if there is a task context)
        - vars_cache[host] (if there is a host context)
        - extra vars

        ``_hosts`` and ``_hosts_all`` should be considered private args, with only internal trusted callers relying
        on the functionality they provide. These arguments may be removed at a later date without a deprecation
        period and without warning.
        '''
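        # A minimal calling sketch (illustrative; loader, inventory, play, host and task
        # are assumed to come from a normal playbook run):
        #
        #   variable_manager = VariableManager(loader=loader, inventory=inventory)
        #   task_vars = variable_manager.get_vars(play=play, host=host, task=task)
        #
        # each of play/host/task is optional and only widens the set of sources merged,
        # following the precedence order documented above.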

        display.debug("in VariableManager get_vars()")

        all_vars = dict()
        magic_variables = self._get_magic_variables(
            play=play,
            host=host,
            task=task,
            include_hostvars=include_hostvars,
            include_delegate_to=include_delegate_to,
            _hosts=_hosts,
            _hosts_all=_hosts_all,
        )

        _vars_sources = {}

        def _combine_and_track(data, new_data, source):
            '''
            Wrapper function to update var sources dict and call combine_vars()

            See notes in the VarsWithSources docstring for caveats and limitations of the source tracking
            '''
            if C.DEFAULT_DEBUG:
                # Populate var sources dict
                for key in new_data:
                    _vars_sources[key] = source
            return combine_vars(data, new_data)

        # default for all cases
        basedirs = []
        if self.safe_basedir:  # avoid adhoc/console loading cwd
            basedirs = [self._loader.get_basedir()]

        if play:
            # first we compile any vars specified in defaults/main.yml
            # for all roles within the specified play
            for role in play.get_roles():
                all_vars = _combine_and_track(all_vars, role.get_default_vars(), "role '%s' defaults" % role.name)

        if task:
            # set basedirs
            if C.PLAYBOOK_VARS_ROOT == 'all':  # should be default
                basedirs = task.get_search_path()
            elif C.PLAYBOOK_VARS_ROOT in ('bottom', 'playbook_dir'):  # only option in 2.4.0
                basedirs = [task.get_search_path()[0]]
            elif C.PLAYBOOK_VARS_ROOT != 'top':
                # preserves default basedirs, only option pre 2.3
                raise AnsibleError('Unknown playbook vars logic: %s' % C.PLAYBOOK_VARS_ROOT)

            # if we have a task in this context, and that task has a role, make
            # sure it sees its defaults above any other roles, as we previously
            # (v1) made sure each task had a copy of its role's default vars
            if task._role is not None and (play or task.action in C._ACTION_INCLUDE_ROLE):
                all_vars = _combine_and_track(all_vars, task._role.get_default_vars(dep_chain=task.get_dep_chain()),
                                              "role '%s' defaults" % task._role.name)

        if host:
            # the 'all' group and the rest of the groups for a host, used below
            all_group = self._inventory.groups.get('all')
            host_groups = sort_groups([g for g in host.get_groups() if g.name not in ['all']])

            def _get_plugin_vars(plugin, path, entities):
                data = {}
                try:
                    data = plugin.get_vars(self._loader, path, entities)
                except AttributeError:
                    try:
                        for entity in entities:
                            if isinstance(entity, Host):
                                data |= plugin.get_host_vars(entity.name)
                            else:
                                data |= plugin.get_group_vars(entity.name)
                    except AttributeError:
                        if hasattr(plugin, 'run'):
                            raise AnsibleError("Cannot use v1 type vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
                        else:
                            raise AnsibleError("Invalid vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
                return data
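
            # For reference, a sketch of the vars plugin interface this helper expects
            # (hypothetical plugin; only the method signature matters here):
            #
            #   class VarsModule(BaseVarsPlugin):
            #       def get_vars(self, loader, path, entities):
            #           return {'some_var': 'some_value'}
            #
            # older-style plugins that only provide get_host_vars()/get_group_vars()
            # are handled by the AttributeError fallback above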

            # internal functions that actually do the work
            def _plugins_inventory(entities):
                ''' merges all entities by inventory source '''
                return get_vars_from_inventory_sources(self._loader, self._inventory._sources, entities, stage)

            def _plugins_play(entities):
                ''' merges all entities adjacent to play '''
                data = {}
                for path in basedirs:
                    data = _combine_and_track(data, get_vars_from_path(self._loader, path, entities, stage), "path '%s'" % path)
                return data

            # configurable functions that are sortable via config; remember to add to _ALLOWED if expanding this list
            def all_inventory():
                return all_group.get_vars()

            def all_plugins_inventory():
                return _plugins_inventory([all_group])

            def all_plugins_play():
                return _plugins_play([all_group])

            def groups_inventory():
                ''' gets group vars from inventory '''
                return get_group_vars(host_groups)

            def groups_plugins_inventory():
                ''' gets plugin sources from inventory for groups '''
                return _plugins_inventory(host_groups)

            def groups_plugins_play():
                ''' gets plugin sources from play for groups '''
                return _plugins_play(host_groups)

            def plugins_by_groups():
                '''
                merges all plugin sources by group,
                this should be used INSTEAD OF, not in combination with, the other groups_plugins* functions
                '''
                data = {}
                for group in host_groups:
                    data[group] = _combine_and_track(data.get(group, {}), _plugins_inventory([group]), "inventory group_vars for '%s'" % group)
                    data[group] = _combine_and_track(data[group], _plugins_play([group]), "playbook group_vars for '%s'" % group)
                return data

            # Merge groups as per precedence config
            # only allow to call the functions we want exposed
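            # (illustratively, the shipped default for VARIABLE_PRECEDENCE is assumed to be:
            #  all_inventory, groups_inventory, all_plugins_inventory,
            #  groups_plugins_inventory, all_plugins_play, groups_plugins_play)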
            for entry in C.VARIABLE_PRECEDENCE:
                if entry in self._ALLOWED:
                    display.debug('Calling %s to load vars for %s' % (entry, host.name))
                    all_vars = _combine_and_track(all_vars, locals()[entry](), "group vars, precedence entry '%s'" % entry)
                else:
                    display.warning('Ignoring unknown variable precedence entry: %s' % (entry))

            # host vars, from inventory, inventory adjacent and play adjacent via plugins
            all_vars = _combine_and_track(all_vars, host.get_vars(), "host vars for '%s'" % host)
            all_vars = _combine_and_track(all_vars, _plugins_inventory([host]), "inventory host_vars for '%s'" % host)
            all_vars = _combine_and_track(all_vars, _plugins_play([host]), "playbook host_vars for '%s'" % host)

            # finally, the facts cache for this host, if it exists
            # TODO: cleaning of facts should eventually become part of taskresults instead of vars
            try:
                facts = wrap_var(self._fact_cache.get(host.name, {}))
                all_vars |= namespace_facts(facts)

                # push facts to main namespace
                if C.INJECT_FACTS_AS_VARS:
                    all_vars = _combine_and_track(all_vars, wrap_var(clean_facts(facts)), "facts")
                else:
                    # always 'promote' ansible_local
                    all_vars = _combine_and_track(all_vars, wrap_var({'ansible_local': facts.get('ansible_local', {})}), "facts")
            except KeyError:
                pass

        if play:
            all_vars = _combine_and_track(all_vars, play.get_vars(), "play vars")

            vars_files = play.get_vars_files()
            try:
                for vars_file_item in vars_files:
                    # create a set of temporary vars here, which incorporate the extra
                    # and magic vars so we can properly template the vars_files entries
                    # NOTE: this makes them depend on host vars/facts so things like
                    #       ansible_facts['os_distribution'] can be used, ala include_vars.
                    #       Consider DEPRECATING this in the future, since we have include_vars ...
                    temp_vars = combine_vars(all_vars, self._extra_vars)
                    temp_vars = combine_vars(temp_vars, magic_variables)
                    templar = Templar(loader=self._loader, variables=temp_vars)

                    # we assume each item in the list is itself a list, as we
                    # support "conditional includes" for vars_files, which mimics
                    # the with_first_found mechanism.
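                    # e.g. a play might specify (illustrative YAML):
                    #
                    #   vars_files:
                    #     - common.yml
                    #     - [ "{{ ansible_distribution }}.yml", "default.yml" ]
                    #
                    # where the nested list means "use the first file that exists"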
                    vars_file_list = vars_file_item
                    if not isinstance(vars_file_list, list):
                        vars_file_list = [vars_file_list]

                    # now we iterate through the (potential) files, and break out
                    # as soon as we successfully read one from the list. If none are
                    # found, an error may be raised depending on the context (see below).
                    try:
                        for vars_file in vars_file_list:
                            vars_file = templar.template(vars_file)
                            if not (isinstance(vars_file, Sequence)):
                                raise AnsibleError(
                                    "Invalid vars_files entry found: %r\n"
                                    "vars_files entries should be either a string type or "
                                    "a list of string types after template expansion" % vars_file
                                )
                            try:
                                play_search_stack = play.get_search_path()
                                found_file = real_file = self._loader.path_dwim_relative_stack(play_search_stack, 'vars', vars_file)
                                data = preprocess_vars(self._loader.load_from_file(found_file, unsafe=True, cache=False))
                                if data is not None:
                                    for item in data:
                                        all_vars = _combine_and_track(all_vars, item, "play vars_files from '%s'" % vars_file)
                                break
                            except AnsibleFileNotFound:
                                # we continue on loader failures
                                continue
                            except AnsibleParserError:
                                raise
                        else:
                            # if include_delegate_to is set to False or we don't have a host, we ignore the missing
                            # vars file here because we're working on a delegated host or require host vars, see NOTE above
                            if include_delegate_to and host:
                                raise AnsibleFileNotFound("vars file %s was not found" % vars_file_item)
                    except (UndefinedError, AnsibleUndefinedVariable):
                        if host is not None and self._fact_cache.get(host.name, dict()).get('module_setup') and task is not None:
                            raise AnsibleUndefinedVariable("an undefined variable was found when attempting to template the vars_files item '%s'"
                                                           % vars_file_item, obj=vars_file_item)
                        else:
                            # we do not have a full context here, and the missing variable could be because of that
                            # so just show a warning and continue
                            display.vvv("skipping vars_file '%s' due to an undefined variable" % vars_file_item)
                            continue

                    display.vvv("Read vars_file '%s'" % vars_file_item)
            except TypeError:
                raise AnsibleParserError("Error while reading vars files - please supply a list of file names. "
                                         "Got '%s' of type %s" % (vars_files, type(vars_files)))

            # By default, we now merge in all vars from all roles in the play,
            # unless the user has disabled this via a config option
            if not C.DEFAULT_PRIVATE_ROLE_VARS:
                for role in play.get_roles():
                    all_vars = _combine_and_track(all_vars, role.get_vars(include_params=False), "role '%s' vars" % role.name)

        # next, we merge in the vars from the role, which will specifically
        # follow the role dependency chain, and then we merge in the tasks
        # vars (which will look at parent blocks/task includes)
        if task:
            if task._role:
                all_vars = _combine_and_track(all_vars, task._role.get_vars(task.get_dep_chain(), include_params=False),
                                              "role '%s' vars" % task._role.name)
            all_vars = _combine_and_track(all_vars, task.get_vars(), "task vars")

        # next, we merge in the vars cache (include vars) and nonpersistent
        # facts cache (set_fact/register), in that order
        if host:
            # include_vars non-persistent cache
            all_vars = _combine_and_track(all_vars, self._vars_cache.get(host.get_name(), dict()), "include_vars")
            # fact non-persistent cache
            all_vars = _combine_and_track(all_vars, self._nonpersistent_fact_cache.get(host.name, dict()), "set_fact")

        # next, we merge in role params and task include params
        if task:
            if task._role:
                all_vars = _combine_and_track(all_vars, task._role.get_role_params(task.get_dep_chain()), "role '%s' params" % task._role.name)

            # special case for include tasks, where the include params
            # may be specified in the vars field for the task, which should
            # have higher precedence than the vars/np facts above
            all_vars = _combine_and_track(all_vars, task.get_include_params(), "include params")

        # extra vars
        all_vars = _combine_and_track(all_vars, self._extra_vars, "extra vars")

        # magic variables
        all_vars = _combine_and_track(all_vars, magic_variables, "magic vars")

        # special case for the 'environment' magic variable, as someone
        # may have set it as a variable and we don't want to stomp on it
        if task:
            all_vars['environment'] = task.environment

        # 'vars' magic var
        if task or play:
            # has to be copy, otherwise recursive ref
            all_vars['vars'] = all_vars.copy()

        # if we have a host and task and we're delegating to another host,
        # figure out the variables for that host now so we don't have to rely on host vars later
        if task and host and task.delegate_to is not None and include_delegate_to:
            all_vars['ansible_delegated_vars'], all_vars['_ansible_loop_cache'] = self._get_delegated_vars(play, task, all_vars)

        display.debug("done with get_vars()")
        if C.DEFAULT_DEBUG:
            # Use VarsWithSources wrapper class to display var sources
            return VarsWithSources.new_vars_with_sources(all_vars, _vars_sources)
        else:
            return all_vars

    def _get_magic_variables(self, play, host, task, include_hostvars, include_delegate_to, _hosts=None, _hosts_all=None):
        '''
        Returns a dictionary of so-called "magic" variables in Ansible,
        which are special variables we set internally for use.
        '''

        variables = {}
        variables['playbook_dir'] = os.path.abspath(self._loader.get_basedir())
        variables['ansible_playbook_python'] = sys.executable
        variables['ansible_config_file'] = C.CONFIG_FILE

        if play:
            # This is a list of all role names of all dependencies for all roles for this play
            dependency_role_names = list({d.get_name() for r in play.roles for d in r.get_all_dependencies()})
            # This is a list of all role names of all roles for this play
            play_role_names = [r.get_name() for r in play.roles]

            # ansible_role_names includes all role names, dependent or directly referenced by the play
            variables['ansible_role_names'] = list(set(dependency_role_names + play_role_names))
            # ansible_play_role_names includes the names of all roles directly referenced by this play
            # roles that are implicitly referenced via dependencies are not listed.
            variables['ansible_play_role_names'] = play_role_names
            # ansible_dependent_role_names includes the names of all roles that are referenced via dependencies
            # dependencies that are also explicitly named as roles are included in this list
            variables['ansible_dependent_role_names'] = dependency_role_names
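            # illustrative example (hypothetical play): with roles [web, db] where 'web'
            # depends on 'common', the variables above work out to:
            #   ansible_role_names           -> ['web', 'db', 'common']  (order not guaranteed)
            #   ansible_play_role_names      -> ['web', 'db']
            #   ansible_dependent_role_names -> ['common']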

            # DEPRECATED: role_names should be deprecated in favor of ansible_role_names or ansible_play_role_names
            variables['role_names'] = variables['ansible_play_role_names']

            variables['ansible_play_name'] = play.get_name()

        if task:
            if task._role:
                variables['role_name'] = task._role.get_name(include_role_fqcn=False)
                variables['role_path'] = task._role._role_path
                variables['role_uuid'] = text_type(task._role._uuid)
                variables['ansible_collection_name'] = task._role._role_collection
                variables['ansible_role_name'] = task._role.get_name()

        if self._inventory is not None:
            variables['groups'] = self._inventory.get_groups_dict()
            if play:
                templar = Templar(loader=self._loader)
                if not play.finalized and templar.is_template(play.hosts):
                    pattern = 'all'
                else:
                    pattern = play.hosts or 'all'
                # add the list of hosts in the play, as adjusted for limit/filters
                if not _hosts_all:
                    _hosts_all = [h.name for h in self._inventory.get_hosts(pattern=pattern, ignore_restrictions=True)]
                if not _hosts:
                    _hosts = [h.name for h in self._inventory.get_hosts()]

                variables['ansible_play_hosts_all'] = _hosts_all[:]
                variables['ansible_play_hosts'] = [x for x in variables['ansible_play_hosts_all'] if x not in play._removed_hosts]
                variables['ansible_play_batch'] = [x for x in _hosts if x not in play._removed_hosts]

                # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_batch,
                # however this would take work in the templating engine, so for now we'll add both
                variables['play_hosts'] = variables['ansible_play_batch']

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        variables['omit'] = self._omit_token
        # Set options vars
        for option, option_value in self._options_vars.items():
            variables[option] = option_value

        if self._hostvars is not None and include_hostvars:
            variables['hostvars'] = self._hostvars

        return variables

    def _get_delegated_vars(self, play, task, existing_variables):
        # This method has a lot of code copied from ``TaskExecutor._get_loop_items``;
        # if this is failing and ``TaskExecutor._get_loop_items`` is not,
        # then more will have to be copied here.
        # TODO: dedupe code here and in ``TaskExecutor._get_loop_items``;
        #       this may be possible once we move pre-processing pre-fork

        if not hasattr(task, 'loop'):
            # This "task" is not a Task, so we need to skip it
            return {}, None

        # we unfortunately need to template the delegate_to field here,
        # as we're fetching vars before post_validate has been called on
        # the task that has been passed in
        vars_copy = existing_variables.copy()

        # get search path for this task to pass to lookup plugins
        vars_copy['ansible_search_path'] = task.get_search_path()

        # ensure the basedir is always included (dwim already searches there, but we need it for display)
        if self._loader.get_basedir() not in vars_copy['ansible_search_path']:
            vars_copy['ansible_search_path'].append(self._loader.get_basedir())

        templar = Templar(loader=self._loader, variables=vars_copy)

        items = []
        has_loop = True
        if task.loop_with is not None:
            if task.loop_with in lookup_loader:
                fail = True
                if task.loop_with == 'first_found':
                    # first_found loops are special. If the item is undefined then we want to fall through to the next
                    fail = False
                try:
                    loop_terms = listify_lookup_plugin_terms(terms=task.loop, templar=templar, fail_on_undefined=fail, convert_bare=False)

                    if not fail:
                        loop_terms = [t for t in loop_terms if not templar.is_template(t)]

                    mylookup = lookup_loader.get(task.loop_with, loader=self._loader, templar=templar)

                    # give lookup task 'context' for subdir (mostly needed for first_found)
                    for subdir in ['template', 'var', 'file']:  # TODO: move this to constants?
                        if subdir in task.action:
                            break
                    setattr(mylookup, '_subdir', subdir + 's')

                    items = wrap_var(mylookup.run(terms=loop_terms, variables=vars_copy))

                except AnsibleTemplateError:
                    # This task will be skipped later due to this, so we just set up
                    # a dummy array for the later code so it doesn't fail
                    items = [None]
            else:
                raise AnsibleError("Failed to find the lookup named '%s' in the available lookup plugins" % task.loop_with)
        elif task.loop is not None:
            try:
                items = templar.template(task.loop)
            except AnsibleTemplateError:
                # This task will be skipped later due to this, so we just set up
                # a dummy array for the later code so it doesn't fail
                items = [None]
        else:
            has_loop = False
            items = [None]

        # since the host can change per loop iteration, we keep a dict of resolved vars per delegated host name
        delegated_host_vars = dict()
        item_var = getattr(task.loop_control, 'loop_var', 'item')
        cache_items = False
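        # e.g. (illustrative): for a task with
        #   delegate_to: "{{ item.host }}"
        #   loop: [{'host': 'web1'}, {'host': 'web2'}]
        # the loop below resolves vars once for 'web1' and once for 'web2', and
        # cache_items is flipped on because templating changed the delegate_to value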
        for item in items:
            # update the variables with the item value for templating, in case we need it
            if item is not None:
                vars_copy[item_var] = item

            templar.available_variables = vars_copy
            delegated_host_name = templar.template(task.delegate_to, fail_on_undefined=False)
            if delegated_host_name != task.delegate_to:
                cache_items = True
            if delegated_host_name is None:
                raise AnsibleError(message="Undefined delegate_to host for task:", obj=task._ds)
            if not isinstance(delegated_host_name, string_types):
                raise AnsibleError(message="the field 'delegate_to' has an invalid type (%s), and could not be"
                                           " converted to a string type." % type(delegated_host_name), obj=task._ds)

            if delegated_host_name in delegated_host_vars:
                # no need to repeat ourselves, as the delegate_to value
                # does not appear to be tied to the loop item variable
                continue

            # now try to find the delegated-to host in inventory, or failing that,
            # create a new host on the fly so we can fetch variables for it
            delegated_host = None
            if self._inventory is not None:
                delegated_host = self._inventory.get_host(delegated_host_name)
                # try looking it up based on the address field, and finally
                # fall back to creating a host on the fly to use for the var lookup
                if delegated_host is None:
                    for h in self._inventory.get_hosts(ignore_limits=True, ignore_restrictions=True):
                        # check if the address matches, or if both the delegated_to host
                        # and the current host are in the list of localhost aliases
                        if h.address == delegated_host_name:
                            delegated_host = h
                            break
                    else:
                        delegated_host = Host(name=delegated_host_name)
            else:
                delegated_host = Host(name=delegated_host_name)

            # now we go fetch the vars for the delegated-to host and save them in our
            # master dictionary of variables to be used later in the TaskExecutor/PlayContext
            delegated_host_vars[delegated_host_name] = self.get_vars(
                play=play,
                host=delegated_host,
                task=task,
                include_delegate_to=False,
                include_hostvars=True,
            )
            delegated_host_vars[delegated_host_name]['inventory_hostname'] = vars_copy.get('inventory_hostname')

        _ansible_loop_cache = None
        if has_loop and cache_items:
            # delegate_to templating produced a change, so we will cache the templated items
            # in a special private hostvar
            # this ensures that delegate_to+loop doesn't produce different results than TaskExecutor
            # which may reprocess the loop
            _ansible_loop_cache = items

        return delegated_host_vars, _ansible_loop_cache

    def clear_facts(self, hostname):
        '''
        Clears the facts for a host
        '''
        self._fact_cache.pop(hostname, None)

    def set_host_facts(self, host, facts):
        '''
        Sets or updates the given facts for a host in the fact cache.
        '''

        if not isinstance(facts, Mapping):
            raise AnsibleAssertionError("the type of 'facts' to set for host_facts should be a Mapping but is a %s" % type(facts))

        try:
            host_cache = self._fact_cache[host]
        except KeyError:
            # We get to set this as new
            host_cache = facts
        else:
            if not isinstance(host_cache, MutableMapping):
                raise TypeError('The object retrieved for {0} must be a MutableMapping but was'
                                ' a {1}'.format(host, type(host_cache)))
            # Update the existing facts
            host_cache |= facts

        # Save the facts back to the backing store
        self._fact_cache[host] = host_cache

    def set_nonpersistent_facts(self, host, facts):
        '''
        Sets or updates the given facts for a host in the non-persistent fact cache.
        '''

        if not isinstance(facts, Mapping):
            raise AnsibleAssertionError("the type of 'facts' to set for nonpersistent_facts should be a Mapping but is a %s" % type(facts))

        try:
            self._nonpersistent_fact_cache[host] |= facts
        except KeyError:
            self._nonpersistent_fact_cache[host] = facts

    def set_host_variable(self, host, varname, value):
        '''
        Sets a value in the vars_cache for a host.
        '''
        if host not in self._vars_cache:
            self._vars_cache[host] = dict()
        if varname in self._vars_cache[host] and isinstance(self._vars_cache[host][varname], MutableMapping) and isinstance(value, MutableMapping):
            self._vars_cache[host] = combine_vars(self._vars_cache[host], {varname: value})
        else:
            self._vars_cache[host][varname] = value


class VarsWithSources(MutableMapping):
    '''
    Dict-like class for vars that also provides source information for each var

    This class can only store the source for top-level vars. It does no tracking
    on its own; it just shows a debug message with the source information it was
    given when a particular var is accessed.
    '''
    def __init__(self, *args, **kwargs):
        ''' Dict-compatible constructor '''
        self.data = dict(*args, **kwargs)
        self.sources = {}

    @classmethod
    def new_vars_with_sources(cls, data, sources):
        ''' Alternate constructor method to instantiate class with sources '''
        v = cls(data)
        v.sources = sources
        return v

    def get_source(self, key):
        return self.sources.get(key, None)

    def __getitem__(self, key):
        val = self.data[key]
        # See notes in the VarsWithSources docstring for caveats and limitations of the source tracking
        display.debug("variable '%s' from source: %s" % (key, self.sources.get(key, "unknown")))
        return val

    def __setitem__(self, key, value):
        self.data[key] = value

    def __delitem__(self, key):
        del self.data[key]

    def __iter__(self):
        return iter(self.data)

    def __len__(self):
        return len(self.data)

    # Prevent duplicate debug messages by defining our own __contains__ pointing at the underlying dict
    def __contains__(self, key):
        return self.data.__contains__(key)

    def copy(self):
        return VarsWithSources.new_vars_with_sources(self.data.copy(), self.sources.copy())
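
# A minimal, illustrative demonstration of VarsWithSources (hypothetical variable
# names and sources; only runs when this file is executed directly, never on import):
if __name__ == '__main__':
    demo = VarsWithSources.new_vars_with_sources(
        {'http_port': 8080},                    # the merged variables
        {'http_port': "role 'web' defaults"},   # where each top-level variable came from
    )
    print(demo['http_port'], demo.get_source('http_port'))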