Diffstat (limited to 'lib/ansible/utils')
-rw-r--r--  lib/ansible/utils/__init__.py | 20
-rw-r--r--  lib/ansible/utils/_junit_xml.py | 267
-rw-r--r--  lib/ansible/utils/cmd_functions.py | 66
-rw-r--r--  lib/ansible/utils/collection_loader/__init__.py | 26
-rw-r--r--  lib/ansible/utils/collection_loader/_collection_config.py | 103
-rw-r--r--  lib/ansible/utils/collection_loader/_collection_finder.py | 1161
-rw-r--r--  lib/ansible/utils/collection_loader/_collection_meta.py | 32
-rw-r--r--  lib/ansible/utils/color.py | 112
-rw-r--r--  lib/ansible/utils/context_objects.py | 92
-rw-r--r--  lib/ansible/utils/display.py | 526
-rw-r--r--  lib/ansible/utils/encrypt.py | 272
-rw-r--r--  lib/ansible/utils/fqcn.py | 33
-rw-r--r--  lib/ansible/utils/galaxy.py | 107
-rw-r--r--  lib/ansible/utils/hashing.py | 89
-rw-r--r--  lib/ansible/utils/helpers.py | 51
-rw-r--r--  lib/ansible/utils/jsonrpc.py | 113
-rw-r--r--  lib/ansible/utils/listify.py | 46
-rw-r--r--  lib/ansible/utils/lock.py | 43
-rw-r--r--  lib/ansible/utils/multiprocessing.py | 17
-rw-r--r--  lib/ansible/utils/native_jinja.py | 13
-rw-r--r--  lib/ansible/utils/path.py | 161
-rw-r--r--  lib/ansible/utils/plugin_docs.py | 351
-rw-r--r--  lib/ansible/utils/py3compat.py | 70
-rw-r--r--  lib/ansible/utils/sentinel.py | 68
-rw-r--r--  lib/ansible/utils/shlex.py | 34
-rw-r--r--  lib/ansible/utils/singleton.py | 29
-rw-r--r--  lib/ansible/utils/ssh_functions.py | 66
-rw-r--r--  lib/ansible/utils/unicode.py | 33
-rw-r--r--  lib/ansible/utils/unsafe_proxy.py | 128
-rw-r--r--  lib/ansible/utils/vars.py | 293
-rw-r--r--  lib/ansible/utils/version.py | 272
31 files changed, 4694 insertions, 0 deletions
diff --git a/lib/ansible/utils/__init__.py b/lib/ansible/utils/__init__.py
new file mode 100644
index 0000000..ae8ccff
--- /dev/null
+++ b/lib/ansible/utils/__init__.py
@@ -0,0 +1,20 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
diff --git a/lib/ansible/utils/_junit_xml.py b/lib/ansible/utils/_junit_xml.py
new file mode 100644
index 0000000..3b95867
--- /dev/null
+++ b/lib/ansible/utils/_junit_xml.py
@@ -0,0 +1,267 @@
+"""
+Dataclasses for creating JUnit XML files.
+See: https://github.com/junit-team/junit5/blob/main/platform-tests/src/test/resources/jenkins-junit.xsd
+"""
+from __future__ import annotations
+
+import abc
+import dataclasses
+import datetime
+import decimal
+
+from xml.dom import minidom
+# noinspection PyPep8Naming
+from xml.etree import ElementTree as ET
+
+
+@dataclasses.dataclass # type: ignore[misc] # https://github.com/python/mypy/issues/5374
+class TestResult(metaclass=abc.ABCMeta):
+ """Base class for the result of a test case."""
+ output: str | None = None
+ message: str | None = None
+ type: str | None = None
+
+ def __post_init__(self):
+ if self.type is None:
+ self.type = self.tag
+
+ @property
+ @abc.abstractmethod
+ def tag(self) -> str:
+ """Tag name for the XML element created by this result type."""
+
+ def get_attributes(self) -> dict[str, str]:
+ """Return a dictionary of attributes for this instance."""
+ return _attributes(
+ message=self.message,
+ type=self.type,
+ )
+
+ def get_xml_element(self) -> ET.Element:
+ """Return an XML element representing this instance."""
+ element = ET.Element(self.tag, self.get_attributes())
+ element.text = self.output
+
+ return element
+
+
+@dataclasses.dataclass
+class TestFailure(TestResult):
+ """Failure info for a test case."""
+ @property
+ def tag(self) -> str:
+ """Tag name for the XML element created by this result type."""
+ return 'failure'
+
+
+@dataclasses.dataclass
+class TestError(TestResult):
+ """Error info for a test case."""
+ @property
+ def tag(self) -> str:
+ """Tag name for the XML element created by this result type."""
+ return 'error'
+
+
+@dataclasses.dataclass
+class TestCase:
+ """An individual test case."""
+ name: str
+ assertions: int | None = None
+ classname: str | None = None
+ status: str | None = None
+ time: decimal.Decimal | None = None
+
+ errors: list[TestError] = dataclasses.field(default_factory=list)
+ failures: list[TestFailure] = dataclasses.field(default_factory=list)
+ skipped: str | None = None
+ system_out: str | None = None
+ system_err: str | None = None
+
+ is_disabled: bool = False
+
+ @property
+ def is_failure(self) -> bool:
+ """True if the test case contains failure info."""
+ return bool(self.failures)
+
+ @property
+ def is_error(self) -> bool:
+ """True if the test case contains error info."""
+ return bool(self.errors)
+
+ @property
+ def is_skipped(self) -> bool:
+ """True if the test case was skipped."""
+ return bool(self.skipped)
+
+ def get_attributes(self) -> dict[str, str]:
+ """Return a dictionary of attributes for this instance."""
+ return _attributes(
+ assertions=self.assertions,
+ classname=self.classname,
+ name=self.name,
+ status=self.status,
+ time=self.time,
+ )
+
+ def get_xml_element(self) -> ET.Element:
+ """Return an XML element representing this instance."""
+ element = ET.Element('testcase', self.get_attributes())
+
+ if self.skipped:
+ ET.SubElement(element, 'skipped').text = self.skipped
+
+ element.extend([error.get_xml_element() for error in self.errors])
+ element.extend([failure.get_xml_element() for failure in self.failures])
+
+ if self.system_out:
+ ET.SubElement(element, 'system-out').text = self.system_out
+
+ if self.system_err:
+ ET.SubElement(element, 'system-err').text = self.system_err
+
+ return element
+
+
+@dataclasses.dataclass
+class TestSuite:
+ """A collection of test cases."""
+ name: str
+ hostname: str | None = None
+ id: str | None = None
+ package: str | None = None
+ timestamp: datetime.datetime | None = None
+
+ properties: dict[str, str] = dataclasses.field(default_factory=dict)
+ cases: list[TestCase] = dataclasses.field(default_factory=list)
+ system_out: str | None = None
+ system_err: str | None = None
+
+ @property
+ def disabled(self) -> int:
+ """The number of disabled test cases."""
+ return sum(case.is_disabled for case in self.cases)
+
+ @property
+ def errors(self) -> int:
+ """The number of test cases containing error info."""
+ return sum(case.is_error for case in self.cases)
+
+ @property
+ def failures(self) -> int:
+ """The number of test cases containing failure info."""
+ return sum(case.is_failure for case in self.cases)
+
+ @property
+ def skipped(self) -> int:
+ """The number of test cases containing skipped info."""
+ return sum(case.is_skipped for case in self.cases)
+
+ @property
+ def tests(self) -> int:
+ """The number of test cases."""
+ return len(self.cases)
+
+ @property
+ def time(self) -> decimal.Decimal:
+ """The total time from all test cases."""
+ return decimal.Decimal(sum(case.time for case in self.cases if case.time))
+
+ def get_attributes(self) -> dict[str, str]:
+ """Return a dictionary of attributes for this instance."""
+ return _attributes(
+ disabled=self.disabled,
+ errors=self.errors,
+ failures=self.failures,
+ hostname=self.hostname,
+ id=self.id,
+ name=self.name,
+ package=self.package,
+ skipped=self.skipped,
+ tests=self.tests,
+ time=self.time,
+ timestamp=self.timestamp.isoformat(timespec='seconds') if self.timestamp else None,
+ )
+
+ def get_xml_element(self) -> ET.Element:
+ """Return an XML element representing this instance."""
+ element = ET.Element('testsuite', self.get_attributes())
+
+ if self.properties:
+ ET.SubElement(element, 'properties').extend([ET.Element('property', dict(name=name, value=value)) for name, value in self.properties.items()])
+
+ element.extend([test_case.get_xml_element() for test_case in self.cases])
+
+ if self.system_out:
+ ET.SubElement(element, 'system-out').text = self.system_out
+
+ if self.system_err:
+ ET.SubElement(element, 'system-err').text = self.system_err
+
+ return element
+
+
+@dataclasses.dataclass
+class TestSuites:
+ """A collection of test suites."""
+ name: str | None = None
+
+ suites: list[TestSuite] = dataclasses.field(default_factory=list)
+
+ @property
+ def disabled(self) -> int:
+ """The number of disabled test cases."""
+ return sum(suite.disabled for suite in self.suites)
+
+ @property
+ def errors(self) -> int:
+ """The number of test cases containing error info."""
+ return sum(suite.errors for suite in self.suites)
+
+ @property
+ def failures(self) -> int:
+ """The number of test cases containing failure info."""
+ return sum(suite.failures for suite in self.suites)
+
+ @property
+ def tests(self) -> int:
+ """The number of test cases."""
+ return sum(suite.tests for suite in self.suites)
+
+ @property
+ def time(self) -> decimal.Decimal:
+ """The total time from all test cases."""
+ return decimal.Decimal(sum(suite.time for suite in self.suites))
+
+ def get_attributes(self) -> dict[str, str]:
+ """Return a dictionary of attributes for this instance."""
+ return _attributes(
+ disabled=self.disabled,
+ errors=self.errors,
+ failures=self.failures,
+ name=self.name,
+ tests=self.tests,
+ time=self.time,
+ )
+
+ def get_xml_element(self) -> ET.Element:
+ """Return an XML element representing this instance."""
+ element = ET.Element('testsuites', self.get_attributes())
+ element.extend([suite.get_xml_element() for suite in self.suites])
+
+ return element
+
+ def to_pretty_xml(self) -> str:
+ """Return a pretty formatted XML string representing this instance."""
+ return _pretty_xml(self.get_xml_element())
+
+
+def _attributes(**kwargs) -> dict[str, str]:
+ """Return the given kwargs as a dictionary with values converted to strings. Items with a value of None will be omitted."""
+ return {key: str(value) for key, value in kwargs.items() if value is not None}
+
+
+def _pretty_xml(element: ET.Element) -> str:
+ """Return a pretty formatted XML string representing the given element."""
+ return minidom.parseString(ET.tostring(element, encoding='unicode')).toprettyxml()
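
For illustration, a minimal sketch of how these dataclasses compose into a report (the case/suite names and values below are invented):

    import datetime
    import decimal

    from ansible.utils._junit_xml import TestCase, TestFailure, TestSuite, TestSuites

    case = TestCase(
        name='test_ping',
        classname='smoke',
        time=decimal.Decimal('0.25'),
        failures=[TestFailure(message='unreachable', output='host timed out')],
    )
    suite = TestSuite(name='smoke', timestamp=datetime.datetime.now(), cases=[case])
    report = TestSuites(suites=[suite])

    # counts (tests/failures/time) are derived properties; attributes whose
    # value is None are omitted from the XML
    print(report.to_pretty_xml())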
diff --git a/lib/ansible/utils/cmd_functions.py b/lib/ansible/utils/cmd_functions.py
new file mode 100644
index 0000000..d4edb2f
--- /dev/null
+++ b/lib/ansible/utils/cmd_functions.py
@@ -0,0 +1,66 @@
+# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import select
+import shlex
+import subprocess
+import sys
+
+from ansible.module_utils._text import to_bytes
+
+
+def run_cmd(cmd, live=False, readsize=10):
+ cmdargs = shlex.split(cmd)
+
+ # subprocess should be passed byte strings.
+ cmdargs = [to_bytes(a, errors='surrogate_or_strict') for a in cmdargs]
+
+ p = subprocess.Popen(cmdargs, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ stdout = b''
+ stderr = b''
+ rpipes = [p.stdout, p.stderr]
+ while True:
+ rfd, wfd, efd = select.select(rpipes, [], rpipes, 1)
+
+ if p.stdout in rfd:
+ dat = os.read(p.stdout.fileno(), readsize)
+ if live:
+ sys.stdout.buffer.write(dat)
+ stdout += dat
+ if dat == b'':
+ rpipes.remove(p.stdout)
+ if p.stderr in rfd:
+ dat = os.read(p.stderr.fileno(), readsize)
+ stderr += dat
+ if live:
+ sys.stdout.buffer.write(dat)
+ if dat == b'':
+ rpipes.remove(p.stderr)
+ # only break out if we've emptied the pipes, or there is nothing to
+ # read from and the process has finished.
+ if (not rpipes or not rfd) and p.poll() is not None:
+ break
+    # Calling wait while there are still pipes to read can cause a deadlock
+ elif not rpipes and p.poll() is None:
+ p.wait()
+
+ return p.returncode, stdout, stderr
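
A brief usage sketch (the command is arbitrary): run_cmd returns the exit code plus the captured stdout/stderr as byte strings, and live=True additionally echoes output to the terminal as it arrives.

    from ansible.utils.cmd_functions import run_cmd

    rc, out, err = run_cmd('git --version')
    if rc != 0:
        print('failed:', err.decode(errors='replace'))
    else:
        print(out.decode(errors='replace'))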
diff --git a/lib/ansible/utils/collection_loader/__init__.py b/lib/ansible/utils/collection_loader/__init__.py
new file mode 100644
index 0000000..83cc246
--- /dev/null
+++ b/lib/ansible/utils/collection_loader/__init__.py
@@ -0,0 +1,26 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# CAUTION: This implementation of the collection loader is used by ansible-test.
+# Because of this, it must be compatible with all Python versions supported on the controller or remote.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+# FIXME: decide what of this we want to actually be public/toplevel, put other stuff on a utility class?
+from ._collection_config import AnsibleCollectionConfig
+from ._collection_finder import AnsibleCollectionRef
+from ansible.module_utils.common.text.converters import to_text
+
+
+def resource_from_fqcr(ref):
+ """
+ Return resource from a fully-qualified collection reference,
+ or from a simple resource name.
+ For fully-qualified collection references, this is equivalent to
+ ``AnsibleCollectionRef.from_fqcr(ref).resource``.
+ :param ref: collection reference to parse
+ :return: the resource as a unicode string
+ """
+ ref = to_text(ref, errors='strict')
+ return ref.split(u'.')[-1]
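
For example (references invented), both fully-qualified and bare names yield the trailing resource segment:

    from ansible.utils.collection_loader import resource_from_fqcr

    resource_from_fqcr('community.general.json_query')  # u'json_query'
    resource_from_fqcr('json_query')                    # u'json_query'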
diff --git a/lib/ansible/utils/collection_loader/_collection_config.py b/lib/ansible/utils/collection_loader/_collection_config.py
new file mode 100644
index 0000000..4f73a1a
--- /dev/null
+++ b/lib/ansible/utils/collection_loader/_collection_config.py
@@ -0,0 +1,103 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# CAUTION: This implementation of the collection loader is used by ansible-test.
+# Because of this, it must be compatible with all Python versions supported on the controller or remote.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils.six import add_metaclass
+
+
+class _EventSource:
+ def __init__(self):
+ self._handlers = set()
+
+ def __iadd__(self, handler):
+ if not callable(handler):
+ raise ValueError('handler must be callable')
+ self._handlers.add(handler)
+ return self
+
+ def __isub__(self, handler):
+ try:
+ self._handlers.remove(handler)
+ except KeyError:
+ pass
+
+ return self
+
+ def _on_exception(self, handler, exc, *args, **kwargs):
+ # if we return True, we want the caller to re-raise
+ return True
+
+ def fire(self, *args, **kwargs):
+ for h in self._handlers:
+ try:
+ h(*args, **kwargs)
+ except Exception as ex:
+ if self._on_exception(h, ex, *args, **kwargs):
+ raise
+
+
+class _AnsibleCollectionConfig(type):
+ def __init__(cls, meta, name, bases):
+ cls._collection_finder = None
+ cls._default_collection = None
+ cls._on_collection_load = _EventSource()
+
+ @property
+ def collection_finder(cls):
+ return cls._collection_finder
+
+ @collection_finder.setter
+ def collection_finder(cls, value):
+ if cls._collection_finder:
+ raise ValueError('an AnsibleCollectionFinder has already been configured')
+
+ cls._collection_finder = value
+
+ @property
+ def collection_paths(cls):
+ cls._require_finder()
+ return [to_text(p) for p in cls._collection_finder._n_collection_paths]
+
+ @property
+ def default_collection(cls):
+ return cls._default_collection
+
+ @default_collection.setter
+ def default_collection(cls, value):
+
+ cls._default_collection = value
+
+ @property
+ def on_collection_load(cls):
+ return cls._on_collection_load
+
+ @on_collection_load.setter
+ def on_collection_load(cls, value):
+ if value is not cls._on_collection_load:
+ raise ValueError('on_collection_load is not directly settable (use +=)')
+
+ @property
+ def playbook_paths(cls):
+ cls._require_finder()
+ return [to_text(p) for p in cls._collection_finder._n_playbook_paths]
+
+ @playbook_paths.setter
+ def playbook_paths(cls, value):
+ cls._require_finder()
+ cls._collection_finder.set_playbook_paths(value)
+
+ def _require_finder(cls):
+ if not cls._collection_finder:
+ raise NotImplementedError('an AnsibleCollectionFinder has not been installed in this process')
+
+
+# concrete class of our metaclass type that defines the class properties we want
+@add_metaclass(_AnsibleCollectionConfig)
+class AnsibleCollectionConfig(object):
+ pass
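
A minimal sketch of the class-level properties this metaclass provides (the handler is hypothetical). Handlers are registered with +=; assigning on_collection_load directly raises ValueError, and fire() re-raises handler exceptions by default.

    from ansible.utils.collection_loader import AnsibleCollectionConfig

    def on_load(collection_name, collection_path):
        print('loaded', collection_name, 'from', collection_path)

    AnsibleCollectionConfig.on_collection_load += on_load
    AnsibleCollectionConfig.default_collection = 'community.general'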
diff --git a/lib/ansible/utils/collection_loader/_collection_finder.py b/lib/ansible/utils/collection_loader/_collection_finder.py
new file mode 100644
index 0000000..d3a8765
--- /dev/null
+++ b/lib/ansible/utils/collection_loader/_collection_finder.py
@@ -0,0 +1,1161 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# CAUTION: This implementation of the collection loader is used by ansible-test.
+# Because of this, it must be compatible with all Python versions supported on the controller or remote.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import os.path
+import pkgutil
+import re
+import sys
+from keyword import iskeyword
+from tokenize import Name as _VALID_IDENTIFIER_REGEX
+
+
+# DO NOT add new non-stdlib import deps here, this loader is used by external tools (eg ansible-test import sanity)
+# that only allow stdlib and module_utils
+from ansible.module_utils.common.text.converters import to_native, to_text, to_bytes
+from ansible.module_utils.six import string_types, PY3
+from ._collection_config import AnsibleCollectionConfig
+
+from contextlib import contextmanager
+from types import ModuleType
+
+try:
+ from importlib import import_module
+except ImportError:
+ def import_module(name): # type: ignore[misc]
+ __import__(name)
+ return sys.modules[name]
+
+try:
+ from importlib import reload as reload_module
+except ImportError:
+ # 2.7 has a global reload function instead...
+ reload_module = reload # type: ignore[name-defined] # pylint:disable=undefined-variable
+
+try:
+ from importlib.util import spec_from_loader
+except ImportError:
+ pass
+
+try:
+ from importlib.machinery import FileFinder
+except ImportError:
+ HAS_FILE_FINDER = False
+else:
+ HAS_FILE_FINDER = True
+
+# NB: this supports import sanity test providing a different impl
+try:
+ from ._collection_meta import _meta_yml_to_dict
+except ImportError:
+ _meta_yml_to_dict = None
+
+
+if not hasattr(__builtins__, 'ModuleNotFoundError'):
+ # this was introduced in Python 3.6
+ ModuleNotFoundError = ImportError
+
+
+_VALID_IDENTIFIER_STRING_REGEX = re.compile(
+ ''.join((_VALID_IDENTIFIER_REGEX, r'\Z')),
+)
+
+
+try: # NOTE: py3/py2 compat
+ # py2 mypy can't deal with try/excepts
+ is_python_identifier = str.isidentifier # type: ignore[attr-defined]
+except AttributeError: # Python 2
+ def is_python_identifier(self): # type: (str) -> bool
+ """Determine whether the given string is a Python identifier."""
+ # Ref: https://stackoverflow.com/a/55802320/595220
+ return bool(re.match(_VALID_IDENTIFIER_STRING_REGEX, self))
+
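For illustration (values arbitrary): this shim only checks identifier syntax; keyword filtering is applied separately via iskeyword (see is_valid_collection_name below).

    is_python_identifier(u'community')  # True
    is_python_identifier(u'my-coll')    # False: '-' is not valid in an identifier
    is_python_identifier(u'class')      # True: syntactically valid, rejected later by iskeyword
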
+
+PB_EXTENSIONS = ('.yml', '.yaml')
+
+
+class _AnsibleCollectionFinder:
+ def __init__(self, paths=None, scan_sys_paths=True):
+ # TODO: accept metadata loader override
+ self._ansible_pkg_path = to_native(os.path.dirname(to_bytes(sys.modules['ansible'].__file__)))
+
+ if isinstance(paths, string_types):
+ paths = [paths]
+ elif paths is None:
+ paths = []
+
+ # expand any placeholders in configured paths
+ paths = [os.path.expanduser(to_native(p, errors='surrogate_or_strict')) for p in paths]
+
+ # add syspaths if needed
+ if scan_sys_paths:
+ paths.extend(sys.path)
+
+ good_paths = []
+ # expand any placeholders in configured paths
+ for p in paths:
+
+ # ensure we always have ansible_collections
+ if os.path.basename(p) == 'ansible_collections':
+ p = os.path.dirname(p)
+
+ if p not in good_paths and os.path.isdir(to_bytes(os.path.join(p, 'ansible_collections'), errors='surrogate_or_strict')):
+ good_paths.append(p)
+
+ self._n_configured_paths = good_paths
+ self._n_cached_collection_paths = None
+ self._n_cached_collection_qualified_paths = None
+
+ self._n_playbook_paths = []
+
+ @classmethod
+ def _remove(cls):
+ for mps in sys.meta_path:
+ if isinstance(mps, _AnsibleCollectionFinder):
+ sys.meta_path.remove(mps)
+
+ # remove any path hooks that look like ours
+ for ph in sys.path_hooks:
+ if hasattr(ph, '__self__') and isinstance(ph.__self__, _AnsibleCollectionFinder):
+ sys.path_hooks.remove(ph)
+
+ # zap any cached path importer cache entries that might refer to us
+ sys.path_importer_cache.clear()
+
+ AnsibleCollectionConfig._collection_finder = None
+
+ # validate via the public property that we really killed it
+ if AnsibleCollectionConfig.collection_finder is not None:
+ raise AssertionError('_AnsibleCollectionFinder remove did not reset AnsibleCollectionConfig.collection_finder')
+
+ def _install(self):
+ self._remove()
+ sys.meta_path.insert(0, self)
+
+ sys.path_hooks.insert(0, self._ansible_collection_path_hook)
+
+ AnsibleCollectionConfig.collection_finder = self
+
+ def _ansible_collection_path_hook(self, path):
+ path = to_native(path)
+ interesting_paths = self._n_cached_collection_qualified_paths
+ if not interesting_paths:
+ interesting_paths = []
+ for p in self._n_collection_paths:
+ if os.path.basename(p) != 'ansible_collections':
+ p = os.path.join(p, 'ansible_collections')
+
+ if p not in interesting_paths:
+ interesting_paths.append(p)
+
+ interesting_paths.insert(0, self._ansible_pkg_path)
+ self._n_cached_collection_qualified_paths = interesting_paths
+
+ if any(path.startswith(p) for p in interesting_paths):
+ return _AnsiblePathHookFinder(self, path)
+
+ raise ImportError('not interested')
+
+ @property
+ def _n_collection_paths(self):
+ paths = self._n_cached_collection_paths
+ if not paths:
+ self._n_cached_collection_paths = paths = self._n_playbook_paths + self._n_configured_paths
+ return paths
+
+ def set_playbook_paths(self, playbook_paths):
+ if isinstance(playbook_paths, string_types):
+ playbook_paths = [playbook_paths]
+
+ # track visited paths; we have to preserve the dir order as-passed in case there are duplicate collections (first one wins)
+ added_paths = set()
+
+ # de-dupe
+ self._n_playbook_paths = [os.path.join(to_native(p), 'collections') for p in playbook_paths if not (p in added_paths or added_paths.add(p))]
+ self._n_cached_collection_paths = None
+ # HACK: playbook CLI sets this relatively late, so we've already loaded some packages whose paths might depend on this. Fix those up.
+ # NB: this should NOT be used for late additions; ideally we'd fix the playbook dir setup earlier in Ansible init
+ # to prevent this from occurring
+ for pkg in ['ansible_collections', 'ansible_collections.ansible']:
+ self._reload_hack(pkg)
+
+ def _reload_hack(self, fullname):
+ m = sys.modules.get(fullname)
+ if not m:
+ return
+ reload_module(m)
+
+ def _get_loader(self, fullname, path=None):
+ split_name = fullname.split('.')
+ toplevel_pkg = split_name[0]
+ module_to_find = split_name[-1]
+ part_count = len(split_name)
+
+ if toplevel_pkg not in ['ansible', 'ansible_collections']:
+ # not interested in anything other than ansible_collections (and limited cases under ansible)
+ return None
+
+ # sanity check what we're getting from import, canonicalize path values
+ if part_count == 1:
+ if path:
+ raise ValueError('path should not be specified for top-level packages (trying to find {0})'.format(fullname))
+ else:
+ # seed the path to the configured collection roots
+ path = self._n_collection_paths
+
+ if part_count > 1 and path is None:
+ raise ValueError('path must be specified for subpackages (trying to find {0})'.format(fullname))
+
+ if toplevel_pkg == 'ansible':
+ # something under the ansible package, delegate to our internal loader in case of redirections
+ initialize_loader = _AnsibleInternalRedirectLoader
+ elif part_count == 1:
+ initialize_loader = _AnsibleCollectionRootPkgLoader
+ elif part_count == 2: # ns pkg eg, ansible_collections, ansible_collections.somens
+ initialize_loader = _AnsibleCollectionNSPkgLoader
+ elif part_count == 3: # collection pkg eg, ansible_collections.somens.somecoll
+ initialize_loader = _AnsibleCollectionPkgLoader
+ else:
+ # anything below the collection
+ initialize_loader = _AnsibleCollectionLoader
+
+ # NB: actual "find"ing is delegated to the constructors on the various loaders; they'll ImportError if not found
+ try:
+ return initialize_loader(fullname=fullname, path_list=path)
+ except ImportError:
+ # TODO: log attempt to load context
+ return None
+
+ def find_module(self, fullname, path=None):
+ # Figure out what's being asked for, and delegate to a special-purpose loader
+ return self._get_loader(fullname, path)
+
+ def find_spec(self, fullname, path, target=None):
+ loader = self._get_loader(fullname, path)
+
+ if loader is None:
+ return None
+
+ spec = spec_from_loader(fullname, loader)
+ if spec is not None and hasattr(loader, '_subpackage_search_paths'):
+ spec.submodule_search_locations = loader._subpackage_search_paths
+ return spec
+
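A sketch of how this finder is wired into the import machinery (the path is illustrative; the leading underscores mark these as internal APIs). _install() removes any prior instance, then prepends the finder to sys.meta_path and its path hook to sys.path_hooks.

    from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder

    finder = _AnsibleCollectionFinder(paths=['~/.ansible/collections'])
    finder._install()

    import ansible_collections  # now resolved by the finder above
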
+
+# Implements a path_hook finder for iter_modules (since it's only path based). This finder does not need to actually
+# function as a finder in most cases, since our meta_path finder is consulted first for *almost* everything, except
+# pkgutil.iter_modules, and under py2, pkgutil.get_data if the parent package passed has not been loaded yet.
+class _AnsiblePathHookFinder:
+ def __init__(self, collection_finder, pathctx):
+ # when called from a path_hook, find_module doesn't usually get the path arg, so this provides our context
+ self._pathctx = to_native(pathctx)
+ self._collection_finder = collection_finder
+ if PY3:
+ # cache the native FileFinder (take advantage of its filesystem cache for future find/load requests)
+ self._file_finder = None
+
+ # class init is fun- this method has a self arg that won't get used
+ def _get_filefinder_path_hook(self=None):
+ _file_finder_hook = None
+ if PY3:
+ # try to find the FileFinder hook to call for fallback path-based imports in Py3
+ _file_finder_hook = [ph for ph in sys.path_hooks if 'FileFinder' in repr(ph)]
+ if len(_file_finder_hook) != 1:
+ raise Exception('need exactly one FileFinder import hook (found {0})'.format(len(_file_finder_hook)))
+ _file_finder_hook = _file_finder_hook[0]
+
+ return _file_finder_hook
+
+ _filefinder_path_hook = _get_filefinder_path_hook()
+
+ def _get_finder(self, fullname):
+ split_name = fullname.split('.')
+ toplevel_pkg = split_name[0]
+
+ if toplevel_pkg == 'ansible_collections':
+ # collections content? delegate to the collection finder
+ return self._collection_finder
+ else:
+            # Something else; we'd normally restrict this to `ansible` descendant modules so that any weird loader
+ # behavior that arbitrary Python modules have can be serviced by those loaders. In some dev/test
+ # scenarios (eg a venv under a collection) our path_hook signs us up to load non-Ansible things, and
+ # it's too late by the time we've reached this point, but also too expensive for the path_hook to figure
+ # out what we *shouldn't* be loading with the limited info it has. So we'll just delegate to the
+ # normal path-based loader as best we can to service it. This also allows us to take advantage of Python's
+ # built-in FS caching and byte-compilation for most things.
+ if PY3:
+ # create or consult our cached file finder for this path
+ if not self._file_finder:
+ try:
+ self._file_finder = _AnsiblePathHookFinder._filefinder_path_hook(self._pathctx)
+ except ImportError:
+ # FUTURE: log at a high logging level? This is normal for things like python36.zip on the path, but
+ # might not be in some other situation...
+ return None
+
+ return self._file_finder
+
+ # call py2's internal loader
+ return pkgutil.ImpImporter(self._pathctx)
+
+ def find_module(self, fullname, path=None):
+ # we ignore the passed in path here- use what we got from the path hook init
+ finder = self._get_finder(fullname)
+
+ if finder is None:
+ return None
+ elif HAS_FILE_FINDER and isinstance(finder, FileFinder):
+ # this codepath is erroneously used under some cases in py3,
+ # and the find_module method on FileFinder does not accept the path arg
+ # see https://github.com/pypa/setuptools/pull/2918
+ return finder.find_module(fullname)
+ else:
+ return finder.find_module(fullname, path=[self._pathctx])
+
+ def find_spec(self, fullname, target=None):
+ split_name = fullname.split('.')
+ toplevel_pkg = split_name[0]
+
+ finder = self._get_finder(fullname)
+
+ if finder is None:
+ return None
+ elif toplevel_pkg == 'ansible_collections':
+ return finder.find_spec(fullname, path=[self._pathctx])
+ else:
+ return finder.find_spec(fullname)
+
+ def iter_modules(self, prefix):
+ # NB: this currently represents only what's on disk, and does not handle package redirection
+ return _iter_modules_impl([self._pathctx], prefix)
+
+ def __repr__(self):
+ return "{0}(path='{1}')".format(self.__class__.__name__, self._pathctx)
+
+
+class _AnsibleCollectionPkgLoaderBase:
+ _allows_package_code = False
+
+ def __init__(self, fullname, path_list=None):
+ self._fullname = fullname
+ self._redirect_module = None
+ self._split_name = fullname.split('.')
+ self._rpart_name = fullname.rpartition('.')
+ self._parent_package_name = self._rpart_name[0] # eg ansible_collections for ansible_collections.somens, '' for toplevel
+ self._package_to_load = self._rpart_name[2] # eg somens for ansible_collections.somens
+
+ self._source_code_path = None
+ self._decoded_source = None
+ self._compiled_code = None
+
+ self._validate_args()
+
+ self._candidate_paths = self._get_candidate_paths([to_native(p) for p in path_list])
+ self._subpackage_search_paths = self._get_subpackage_search_paths(self._candidate_paths)
+
+ self._validate_final()
+
+ # allow subclasses to validate args and sniff split values before we start digging around
+ def _validate_args(self):
+ if self._split_name[0] != 'ansible_collections':
+ raise ImportError('this loader can only load packages from the ansible_collections package, not {0}'.format(self._fullname))
+
+ # allow subclasses to customize candidate path filtering
+ def _get_candidate_paths(self, path_list):
+ return [os.path.join(p, self._package_to_load) for p in path_list]
+
+ # allow subclasses to customize finding paths
+ def _get_subpackage_search_paths(self, candidate_paths):
+ # filter candidate paths for existence (NB: silently ignoring package init code and same-named modules)
+ return [p for p in candidate_paths if os.path.isdir(to_bytes(p))]
+
+ # allow subclasses to customize state validation/manipulation before we return the loader instance
+ def _validate_final(self):
+ return
+
+ @staticmethod
+ @contextmanager
+ def _new_or_existing_module(name, **kwargs):
+ # handle all-or-nothing sys.modules creation/use-existing/delete-on-exception-if-created behavior
+ created_module = False
+ module = sys.modules.get(name)
+ try:
+ if not module:
+ module = ModuleType(name)
+ created_module = True
+ sys.modules[name] = module
+ # always override the values passed, except name (allow reference aliasing)
+ for attr, value in kwargs.items():
+ setattr(module, attr, value)
+ yield module
+ except Exception:
+ if created_module:
+ if sys.modules.get(name):
+ sys.modules.pop(name)
+ raise
+
+ # basic module/package location support
+ # NB: this does not support distributed packages!
+ @staticmethod
+ def _module_file_from_path(leaf_name, path):
+ has_code = True
+ package_path = os.path.join(to_native(path), to_native(leaf_name))
+ module_path = None
+
+ # if the submodule is a package, assemble valid submodule paths, but stop looking for a module
+ if os.path.isdir(to_bytes(package_path)):
+ # is there a package init?
+ module_path = os.path.join(package_path, '__init__.py')
+ if not os.path.isfile(to_bytes(module_path)):
+ module_path = os.path.join(package_path, '__synthetic__')
+ has_code = False
+ else:
+ module_path = package_path + '.py'
+ package_path = None
+ if not os.path.isfile(to_bytes(module_path)):
+ raise ImportError('{0} not found at {1}'.format(leaf_name, path))
+
+ return module_path, has_code, package_path
+
+ def exec_module(self, module):
+ # short-circuit redirect; avoid reinitializing existing modules
+ if self._redirect_module:
+ return
+
+ # execute the module's code in its namespace
+ code_obj = self.get_code(self._fullname)
+ if code_obj is not None: # things like NS packages that can't have code on disk will return None
+ exec(code_obj, module.__dict__)
+
+ def create_module(self, spec):
+ # short-circuit redirect; we've already imported the redirected module, so just alias it and return it
+ if self._redirect_module:
+ return self._redirect_module
+ else:
+ return None
+
+ def load_module(self, fullname):
+ # short-circuit redirect; we've already imported the redirected module, so just alias it and return it
+ if self._redirect_module:
+ sys.modules[self._fullname] = self._redirect_module
+ return self._redirect_module
+
+ # we're actually loading a module/package
+ module_attrs = dict(
+ __loader__=self,
+ __file__=self.get_filename(fullname),
+ __package__=self._parent_package_name # sane default for non-packages
+ )
+
+ # eg, I am a package
+ if self._subpackage_search_paths is not None: # empty is legal
+ module_attrs['__path__'] = self._subpackage_search_paths
+ module_attrs['__package__'] = fullname # per PEP366
+
+ with self._new_or_existing_module(fullname, **module_attrs) as module:
+ # execute the module's code in its namespace
+ code_obj = self.get_code(fullname)
+ if code_obj is not None: # things like NS packages that can't have code on disk will return None
+ exec(code_obj, module.__dict__)
+
+ return module
+
+ def is_package(self, fullname):
+ if fullname != self._fullname:
+ raise ValueError('this loader cannot answer is_package for {0}, only {1}'.format(fullname, self._fullname))
+ return self._subpackage_search_paths is not None
+
+ def get_source(self, fullname):
+ if self._decoded_source:
+ return self._decoded_source
+ if fullname != self._fullname:
+ raise ValueError('this loader cannot load source for {0}, only {1}'.format(fullname, self._fullname))
+ if not self._source_code_path:
+ return None
+ # FIXME: what do we want encoding/newline requirements to be?
+ self._decoded_source = self.get_data(self._source_code_path)
+ return self._decoded_source
+
+ def get_data(self, path):
+ if not path:
+ raise ValueError('a path must be specified')
+
+ # TODO: ensure we're being asked for a path below something we own
+ # TODO: try to handle redirects internally?
+
+ if not path[0] == '/':
+ # relative to current package, search package paths if possible (this may not be necessary)
+ # candidate_paths = [os.path.join(ssp, path) for ssp in self._subpackage_search_paths]
+ raise ValueError('relative resource paths not supported')
+ else:
+ candidate_paths = [path]
+
+ for p in candidate_paths:
+ b_path = to_bytes(p)
+ if os.path.isfile(b_path):
+ with open(b_path, 'rb') as fd:
+ return fd.read()
+            # HACK: if the caller asks for __init__.py and the parent dir exists, return an empty string (this keeps consistency
+            # with "collection subpackages don't require __init__.py" working everywhere with get_data)
+ elif b_path.endswith(b'__init__.py') and os.path.isdir(os.path.dirname(b_path)):
+ return ''
+
+ return None
+
+ def _synthetic_filename(self, fullname):
+ return '<ansible_synthetic_collection_package>'
+
+ def get_filename(self, fullname):
+ if fullname != self._fullname:
+ raise ValueError('this loader cannot find files for {0}, only {1}'.format(fullname, self._fullname))
+
+ filename = self._source_code_path
+
+ if not filename and self.is_package(fullname):
+ if len(self._subpackage_search_paths) == 1:
+ filename = os.path.join(self._subpackage_search_paths[0], '__synthetic__')
+ else:
+ filename = self._synthetic_filename(fullname)
+
+ return filename
+
+ def get_code(self, fullname):
+ if self._compiled_code:
+ return self._compiled_code
+
+ # this may or may not be an actual filename, but it's the value we'll use for __file__
+ filename = self.get_filename(fullname)
+ if not filename:
+ filename = '<string>'
+
+ source_code = self.get_source(fullname)
+
+ # for things like synthetic modules that really have no source on disk, don't return a code object at all
+ # vs things like an empty package init (which has an empty string source on disk)
+ if source_code is None:
+ return None
+
+ self._compiled_code = compile(source=source_code, filename=filename, mode='exec', flags=0, dont_inherit=True)
+
+ return self._compiled_code
+
+ def iter_modules(self, prefix):
+ return _iter_modules_impl(self._subpackage_search_paths, prefix)
+
+ def __repr__(self):
+ return '{0}(path={1})'.format(self.__class__.__name__, self._subpackage_search_paths or self._source_code_path)
+
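For illustration, the three resolution outcomes of _module_file_from_path above (paths hypothetical); a missing plain module raises ImportError instead:

    _AnsibleCollectionPkgLoaderBase._module_file_from_path('mod', '/root/pkg')
    # package with __init__.py:    ('/root/pkg/mod/__init__.py', True, '/root/pkg/mod')
    # package without __init__.py: ('/root/pkg/mod/__synthetic__', False, '/root/pkg/mod')
    # plain module:                ('/root/pkg/mod.py', True, None)
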
+
+class _AnsibleCollectionRootPkgLoader(_AnsibleCollectionPkgLoaderBase):
+ def _validate_args(self):
+ super(_AnsibleCollectionRootPkgLoader, self)._validate_args()
+ if len(self._split_name) != 1:
+ raise ImportError('this loader can only load the ansible_collections toplevel package, not {0}'.format(self._fullname))
+
+
+# Implements Ansible's custom namespace package support.
+# The ansible_collections package and one level down (collections namespaces) are Python namespace packages
+# that search across all configured collection roots. The collection package (two levels down) is the first one found
+# on the configured collection root path, and Python namespace package aggregation is not allowed at or below
+# the collection. Implements implicit package (package dir) support for both Py2/3. Package init code is ignored
+# by this loader.
+class _AnsibleCollectionNSPkgLoader(_AnsibleCollectionPkgLoaderBase):
+ def _validate_args(self):
+ super(_AnsibleCollectionNSPkgLoader, self)._validate_args()
+ if len(self._split_name) != 2:
+ raise ImportError('this loader can only load collections namespace packages, not {0}'.format(self._fullname))
+
+ def _validate_final(self):
+ # special-case the `ansible` namespace, since `ansible.builtin` is magical
+ if not self._subpackage_search_paths and self._package_to_load != 'ansible':
+ raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths))
+
+
+# handles locating the actual collection package and associated metadata
+class _AnsibleCollectionPkgLoader(_AnsibleCollectionPkgLoaderBase):
+ def _validate_args(self):
+ super(_AnsibleCollectionPkgLoader, self)._validate_args()
+ if len(self._split_name) != 3:
+ raise ImportError('this loader can only load collection packages, not {0}'.format(self._fullname))
+
+ def _validate_final(self):
+ if self._split_name[1:3] == ['ansible', 'builtin']:
+ # we don't want to allow this one to have on-disk search capability
+ self._subpackage_search_paths = []
+ elif not self._subpackage_search_paths:
+ raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths))
+ else:
+ # only search within the first collection we found
+ self._subpackage_search_paths = [self._subpackage_search_paths[0]]
+
+ def _load_module(self, module):
+ if not _meta_yml_to_dict:
+ raise ValueError('ansible.utils.collection_loader._meta_yml_to_dict is not set')
+
+ module._collection_meta = {}
+ # TODO: load collection metadata, cache in __loader__ state
+
+ collection_name = '.'.join(self._split_name[1:3])
+
+ if collection_name == 'ansible.builtin':
+ # ansible.builtin is a synthetic collection, get its routing config from the Ansible distro
+ ansible_pkg_path = os.path.dirname(import_module('ansible').__file__)
+ metadata_path = os.path.join(ansible_pkg_path, 'config/ansible_builtin_runtime.yml')
+ with open(to_bytes(metadata_path), 'rb') as fd:
+ raw_routing = fd.read()
+ else:
+ b_routing_meta_path = to_bytes(os.path.join(module.__path__[0], 'meta/runtime.yml'))
+ if os.path.isfile(b_routing_meta_path):
+ with open(b_routing_meta_path, 'rb') as fd:
+ raw_routing = fd.read()
+ else:
+ raw_routing = ''
+ try:
+ if raw_routing:
+ routing_dict = _meta_yml_to_dict(raw_routing, (collection_name, 'runtime.yml'))
+ module._collection_meta = self._canonicalize_meta(routing_dict)
+ except Exception as ex:
+ raise ValueError('error parsing collection metadata: {0}'.format(to_native(ex)))
+
+ AnsibleCollectionConfig.on_collection_load.fire(collection_name=collection_name, collection_path=os.path.dirname(module.__file__))
+
+ return module
+
+ def exec_module(self, module):
+ super(_AnsibleCollectionPkgLoader, self).exec_module(module)
+ self._load_module(module)
+
+ def create_module(self, spec):
+ return None
+
+ def load_module(self, fullname):
+ module = super(_AnsibleCollectionPkgLoader, self).load_module(fullname)
+ return self._load_module(module)
+
+ def _canonicalize_meta(self, meta_dict):
+ # TODO: rewrite import keys and all redirect targets that start with .. (current namespace) and . (current collection)
+ # OR we could do it all on the fly?
+ # if not meta_dict:
+ # return {}
+ #
+ # ns_name = '.'.join(self._split_name[0:2])
+ # collection_name = '.'.join(self._split_name[0:3])
+ #
+ # #
+ # for routing_type, routing_type_dict in iteritems(meta_dict.get('plugin_routing', {})):
+ # for plugin_key, plugin_dict in iteritems(routing_type_dict):
+ # redirect = plugin_dict.get('redirect', '')
+ # if redirect.startswith('..'):
+ # redirect = redirect[2:]
+
+ return meta_dict
+
+
+# loads everything under a collection, including handling redirections defined by the collection
+class _AnsibleCollectionLoader(_AnsibleCollectionPkgLoaderBase):
+ # HACK: stash this in a better place
+ _redirected_package_map = {} # type: dict[str, str]
+ _allows_package_code = True
+
+ def _validate_args(self):
+ super(_AnsibleCollectionLoader, self)._validate_args()
+ if len(self._split_name) < 4:
+ raise ValueError('this loader is only for sub-collection modules/packages, not {0}'.format(self._fullname))
+
+ def _get_candidate_paths(self, path_list):
+ if len(path_list) != 1 and self._split_name[1:3] != ['ansible', 'builtin']:
+ raise ValueError('this loader requires exactly one path to search')
+
+ return path_list
+
+ def _get_subpackage_search_paths(self, candidate_paths):
+ collection_name = '.'.join(self._split_name[1:3])
+ collection_meta = _get_collection_metadata(collection_name)
+
+ # check for explicit redirection, as well as ancestor package-level redirection (only load the actual code once!)
+ redirect = None
+ explicit_redirect = False
+
+ routing_entry = _nested_dict_get(collection_meta, ['import_redirection', self._fullname])
+ if routing_entry:
+ redirect = routing_entry.get('redirect')
+
+ if redirect:
+ explicit_redirect = True
+ else:
+ redirect = _get_ancestor_redirect(self._redirected_package_map, self._fullname)
+
+ # NB: package level redirection requires hooking all future imports beneath the redirected source package
+ # in order to ensure sanity on future relative imports. We always import everything under its "real" name,
+ # then add a sys.modules entry with the redirected name using the same module instance. If we naively imported
+ # the source for each redirection, most submodules would import OK, but we'd have N runtime copies of the module
+ # (one for each name), and relative imports that ascend above the redirected package would break (since they'd
+ # see the redirected ancestor package contents instead of the package where they actually live).
+ if redirect:
+ # FIXME: wrap this so we can be explicit about a failed redirection
+ self._redirect_module = import_module(redirect)
+ if explicit_redirect and hasattr(self._redirect_module, '__path__') and self._redirect_module.__path__:
+                # if the import target looks like a package, store its name so we can rewrite future descendant loads
+ self._redirected_package_map[self._fullname] = redirect
+
+ # if we redirected, don't do any further custom package logic
+ return None
+
+ # we're not doing a redirect- try to find what we need to actually load a module/package
+
+ # this will raise ImportError if we can't find the requested module/package at all
+ if not candidate_paths:
+            # no place to look, just ImportError
+ raise ImportError('package has no paths')
+
+ found_path, has_code, package_path = self._module_file_from_path(self._package_to_load, candidate_paths[0])
+
+ # still here? we found something to load...
+ if has_code:
+ self._source_code_path = found_path
+
+ if package_path:
+ return [package_path] # always needs to be a list
+
+ return None
+
+
+# This loader only answers for intercepted Ansible Python modules. Normal imports will fail here and be picked up later
+# by our path_hook importer (which proxies the built-in import mechanisms, allowing normal caching etc to occur)
+class _AnsibleInternalRedirectLoader:
+ def __init__(self, fullname, path_list):
+ self._redirect = None
+
+ split_name = fullname.split('.')
+ toplevel_pkg = split_name[0]
+ module_to_load = split_name[-1]
+
+ if toplevel_pkg != 'ansible':
+ raise ImportError('not interested')
+
+ builtin_meta = _get_collection_metadata('ansible.builtin')
+
+ routing_entry = _nested_dict_get(builtin_meta, ['import_redirection', fullname])
+ if routing_entry:
+ self._redirect = routing_entry.get('redirect')
+
+ if not self._redirect:
+ raise ImportError('not redirected, go ask path_hook')
+
+ def exec_module(self, module):
+ # should never see this
+ if not self._redirect:
+ raise ValueError('no redirect found for {0}'.format(module.__spec__.name))
+
+ # Replace the module with the redirect
+ sys.modules[module.__spec__.name] = import_module(self._redirect)
+
+ def create_module(self, spec):
+ return None
+
+ def load_module(self, fullname):
+ # since we're delegating to other loaders, this should only be called for internal redirects where we answered
+ # find_module with this loader, in which case we'll just directly import the redirection target, insert it into
+ # sys.modules under the name it was requested by, and return the original module.
+
+ # should never see this
+ if not self._redirect:
+ raise ValueError('no redirect found for {0}'.format(fullname))
+
+ # FIXME: smuggle redirection context, provide warning/error that we tried and failed to redirect
+ mod = import_module(self._redirect)
+ sys.modules[fullname] = mod
+ return mod
+
+
+class AnsibleCollectionRef:
+ # FUTURE: introspect plugin loaders to get these dynamically?
+ VALID_REF_TYPES = frozenset(to_text(r) for r in ['action', 'become', 'cache', 'callback', 'cliconf', 'connection',
+ 'doc_fragments', 'filter', 'httpapi', 'inventory', 'lookup',
+ 'module_utils', 'modules', 'netconf', 'role', 'shell', 'strategy',
+ 'terminal', 'test', 'vars', 'playbook'])
+
+ # FIXME: tighten this up to match Python identifier reqs, etc
+ VALID_SUBDIRS_RE = re.compile(to_text(r'^\w+(\.\w+)*$'))
+ VALID_FQCR_RE = re.compile(to_text(r'^\w+(\.\w+){2,}$')) # can have 0-N included subdirs as well
+
+ def __init__(self, collection_name, subdirs, resource, ref_type):
+ """
+ Create an AnsibleCollectionRef from components
+ :param collection_name: a collection name of the form 'namespace.collectionname'
+ :param subdirs: optional subdir segments to be appended below the plugin type (eg, 'subdir1.subdir2')
+        :param resource: the name of the resource being referenced (eg, 'mymodule', 'someaction', 'a_role')
+ :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
+ """
+ collection_name = to_text(collection_name, errors='strict')
+ if subdirs is not None:
+ subdirs = to_text(subdirs, errors='strict')
+ resource = to_text(resource, errors='strict')
+ ref_type = to_text(ref_type, errors='strict')
+
+ if not self.is_valid_collection_name(collection_name):
+ raise ValueError('invalid collection name (must be of the form namespace.collection): {0}'.format(to_native(collection_name)))
+
+ if ref_type not in self.VALID_REF_TYPES:
+ raise ValueError('invalid collection ref_type: {0}'.format(ref_type))
+
+ self.collection = collection_name
+ if subdirs:
+ if not re.match(self.VALID_SUBDIRS_RE, subdirs):
+ raise ValueError('invalid subdirs entry: {0} (must be empty/None or of the form subdir1.subdir2)'.format(to_native(subdirs)))
+ self.subdirs = subdirs
+ else:
+ self.subdirs = u''
+
+ self.resource = resource
+ self.ref_type = ref_type
+
+ package_components = [u'ansible_collections', self.collection]
+ fqcr_components = [self.collection]
+
+ self.n_python_collection_package_name = to_native('.'.join(package_components))
+
+ if self.ref_type == u'role':
+ package_components.append(u'roles')
+ elif self.ref_type == u'playbook':
+ package_components.append(u'playbooks')
+ else:
+ # we assume it's a plugin
+ package_components += [u'plugins', self.ref_type]
+
+ if self.subdirs:
+ package_components.append(self.subdirs)
+ fqcr_components.append(self.subdirs)
+
+ if self.ref_type in (u'role', u'playbook'):
+ # playbooks and roles are their own resource
+ package_components.append(self.resource)
+
+ fqcr_components.append(self.resource)
+
+ self.n_python_package_name = to_native('.'.join(package_components))
+ self._fqcr = u'.'.join(fqcr_components)
+
+ def __repr__(self):
+ return 'AnsibleCollectionRef(collection={0!r}, subdirs={1!r}, resource={2!r})'.format(self.collection, self.subdirs, self.resource)
+
+ @property
+ def fqcr(self):
+ return self._fqcr
+
+ @staticmethod
+ def from_fqcr(ref, ref_type):
+ """
+        Parse a string as a fully-qualified collection reference; raises ValueError if invalid
+ :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
+ :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
+ :return: a populated AnsibleCollectionRef object
+ """
+ # assuming the fq_name is of the form (ns).(coll).(optional_subdir_N).(resource_name),
+ # we split the resource name off the right, split ns and coll off the left, and we're left with any optional
+ # subdirs that need to be added back below the plugin-specific subdir we'll add. So:
+ # ns.coll.resource -> ansible_collections.ns.coll.plugins.(plugintype).resource
+ # ns.coll.subdir1.resource -> ansible_collections.ns.coll.plugins.subdir1.(plugintype).resource
+ # ns.coll.rolename -> ansible_collections.ns.coll.roles.rolename
+ if not AnsibleCollectionRef.is_valid_fqcr(ref):
+ raise ValueError('{0} is not a valid collection reference'.format(to_native(ref)))
+
+ ref = to_text(ref, errors='strict')
+ ref_type = to_text(ref_type, errors='strict')
+ ext = ''
+
+ if ref_type == u'playbook' and ref.endswith(PB_EXTENSIONS):
+ resource_splitname = ref.rsplit(u'.', 2)
+ package_remnant = resource_splitname[0]
+ resource = resource_splitname[1]
+ ext = '.' + resource_splitname[2]
+ else:
+ resource_splitname = ref.rsplit(u'.', 1)
+ package_remnant = resource_splitname[0]
+ resource = resource_splitname[1]
+
+ # split the left two components of the collection package name off, anything remaining is plugin-type
+ # specific subdirs to be added back on below the plugin type
+ package_splitname = package_remnant.split(u'.', 2)
+ if len(package_splitname) == 3:
+ subdirs = package_splitname[2]
+ else:
+ subdirs = u''
+
+ collection_name = u'.'.join(package_splitname[0:2])
+
+ return AnsibleCollectionRef(collection_name, subdirs, resource + ext, ref_type)
+
+ @staticmethod
+ def try_parse_fqcr(ref, ref_type):
+ """
+ Attempt to parse a string as a fully-qualified collection reference, returning None on failure (instead of raising an error)
+ :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
+ :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
+ :return: a populated AnsibleCollectionRef object on successful parsing, else None
+ """
+ try:
+ return AnsibleCollectionRef.from_fqcr(ref, ref_type)
+ except ValueError:
+ pass
+
+ @staticmethod
+ def legacy_plugin_dir_to_plugin_type(legacy_plugin_dir_name):
+ """
+ Utility method to convert from a PluginLoader dir name to a plugin ref_type
+ :param legacy_plugin_dir_name: PluginLoader dir name (eg, 'action_plugins', 'library')
+ :return: the corresponding plugin ref_type (eg, 'action', 'role')
+ """
+ legacy_plugin_dir_name = to_text(legacy_plugin_dir_name)
+
+ plugin_type = legacy_plugin_dir_name.removesuffix(u'_plugins')
+
+ if plugin_type == u'library':
+ plugin_type = u'modules'
+
+ if plugin_type not in AnsibleCollectionRef.VALID_REF_TYPES:
+ raise ValueError('{0} cannot be mapped to a valid collection ref type'.format(to_native(legacy_plugin_dir_name)))
+
+ return plugin_type
+
+ @staticmethod
+ def is_valid_fqcr(ref, ref_type=None):
+ """
+        Validates whether a string is a well-formed fully-qualified collection reference (does not look up the collection itself)
+ :param ref: candidate collection reference to validate (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
+ :param ref_type: optional reference type to enable deeper validation, eg 'module', 'role', 'doc_fragment'
+ :return: True if the collection ref passed is well-formed, False otherwise
+ """
+
+ ref = to_text(ref)
+
+ if not ref_type:
+ return bool(re.match(AnsibleCollectionRef.VALID_FQCR_RE, ref))
+
+ return bool(AnsibleCollectionRef.try_parse_fqcr(ref, ref_type))
+
+ @staticmethod
+ def is_valid_collection_name(collection_name):
+ """
+ Validates if the given string is a well-formed collection name (does not look up the collection itself)
+ :param collection_name: candidate collection name to validate (a valid name is of the form 'ns.collname')
+ :return: True if the collection name passed is well-formed, False otherwise
+ """
+
+ collection_name = to_text(collection_name)
+
+ if collection_name.count(u'.') != 1:
+ return False
+
+ return all(
+ # NOTE: keywords and identifiers are different in different Pythons
+ not iskeyword(ns_or_name) and is_python_identifier(ns_or_name)
+ for ns_or_name in collection_name.split(u'.')
+ )
+
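A short illustration of the parsing entry points above (the references are invented):

    acr = AnsibleCollectionRef.from_fqcr('community.general.sub.json_query', 'lookup')
    acr.collection             # u'community.general'
    acr.subdirs                # u'sub'
    acr.resource               # u'json_query'
    acr.n_python_package_name  # 'ansible_collections.community.general.plugins.lookup.sub'

    AnsibleCollectionRef.is_valid_fqcr('ns.coll.resource')        # True
    AnsibleCollectionRef.try_parse_fqcr('not_an_fqcr', 'module')  # None
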
+
+def _get_collection_path(collection_name):
+ collection_name = to_native(collection_name)
+ if not collection_name or not isinstance(collection_name, string_types) or len(collection_name.split('.')) != 2:
+ raise ValueError('collection_name must be a non-empty string of the form namespace.collection')
+ try:
+ collection_pkg = import_module('ansible_collections.' + collection_name)
+ except ImportError:
+ raise ValueError('unable to locate collection {0}'.format(collection_name))
+
+ return to_native(os.path.dirname(to_bytes(collection_pkg.__file__)))
+
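For example (collection and path invented); this imports the collection package, so the collection must be installed on a configured root:

    _get_collection_path('community.general')
    # e.g. '/home/user/.ansible/collections/ansible_collections/community/general'
    _get_collection_path('nonsense')  # ValueError: not of the form namespace.collection
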
+
+def _get_collection_playbook_path(playbook):
+
+ acr = AnsibleCollectionRef.try_parse_fqcr(playbook, u'playbook')
+ if acr:
+ try:
+ # get_collection_path
+ pkg = import_module(acr.n_python_collection_package_name)
+ except (IOError, ModuleNotFoundError) as e:
+ # leaving e as debug target, even though not used in normal code
+ pkg = None
+
+ if pkg:
+ cpath = os.path.join(sys.modules[acr.n_python_collection_package_name].__file__.replace('__synthetic__', 'playbooks'))
+
+ if acr.subdirs:
+ paths = [to_native(x) for x in acr.subdirs.split(u'.')]
+ paths.insert(0, cpath)
+ cpath = os.path.join(*paths)
+
+ path = os.path.join(cpath, to_native(acr.resource))
+ if os.path.exists(to_bytes(path)):
+ return acr.resource, path, acr.collection
+ elif not acr.resource.endswith(PB_EXTENSIONS):
+ for ext in PB_EXTENSIONS:
+ path = os.path.join(cpath, to_native(acr.resource + ext))
+ if os.path.exists(to_bytes(path)):
+ return acr.resource, path, acr.collection
+ return None
+
+
+def _get_collection_role_path(role_name, collection_list=None):
+ return _get_collection_resource_path(role_name, u'role', collection_list)
+
+
+def _get_collection_resource_path(name, ref_type, collection_list=None):
+
+ if ref_type == u'playbook':
+        # they are handled a bit differently due to 'extension variance' and no collection_list
+ return _get_collection_playbook_path(name)
+
+ acr = AnsibleCollectionRef.try_parse_fqcr(name, ref_type)
+ if acr:
+ # looks like a valid qualified collection ref; skip the collection_list
+ collection_list = [acr.collection]
+ subdirs = acr.subdirs
+ resource = acr.resource
+ elif not collection_list:
+ return None # not an FQCR and no collection search list specified, nothing to do
+ else:
+ resource = name # treat as unqualified, loop through the collection search list to try and resolve
+ subdirs = ''
+
+ for collection_name in collection_list:
+ try:
+ acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=subdirs, resource=resource, ref_type=ref_type)
+ # FIXME: error handling/logging; need to catch any import failures and move along
+ pkg = import_module(acr.n_python_package_name)
+
+ if pkg is not None:
+ # the package is now loaded, get the collection's package and ask where it lives
+ path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict'))
+ return resource, to_text(path, errors='surrogate_or_strict'), collection_name
+
+ except (IOError, ModuleNotFoundError) as e:
+ continue
+ except Exception as ex:
+ # FIXME: pick out typical import errors first, then error logging
+ continue
+
+ return None
+
+
+def _get_collection_name_from_path(path):
+ """
+ Return the containing collection name for a given path, or None if the path is not below a configured collection, or
+ the collection cannot be loaded (eg, the collection is masked by another of the same name higher in the configured
+ collection roots).
+ :param path: path to evaluate for collection containment
+ :return: collection name or None
+ """
+
+ # ensure we compare full paths since pkg path will be abspath
+ path = to_native(os.path.abspath(to_bytes(path)))
+
+ path_parts = path.split('/')
+ if path_parts.count('ansible_collections') != 1:
+ return None
+
+ ac_pos = path_parts.index('ansible_collections')
+
+ # make sure it's followed by at least a namespace and collection name
+ if len(path_parts) < ac_pos + 3:
+ return None
+
+ candidate_collection_name = '.'.join(path_parts[ac_pos + 1:ac_pos + 3])
+
+ try:
+ # we've got a name for it, now see if the path prefix matches what the loader sees
+ imported_pkg_path = to_native(os.path.dirname(to_bytes(import_module('ansible_collections.' + candidate_collection_name).__file__)))
+ except ImportError:
+ return None
+
+ # reassemble the original path prefix up to the collection name; it should match what we just imported. If not,
+ # this is probably a collection root that's not configured.
+
+ original_path_prefix = os.path.join('/', *path_parts[0:ac_pos + 3])
+
+ imported_pkg_path = to_native(os.path.abspath(to_bytes(imported_pkg_path)))
+ if original_path_prefix != imported_pkg_path:
+ return None
+
+ return candidate_collection_name
+
+
+def _get_import_redirect(collection_meta_dict, fullname):
+ if not collection_meta_dict:
+ return None
+
+ return _nested_dict_get(collection_meta_dict, ['import_redirection', fullname, 'redirect'])
+
+
+def _get_ancestor_redirect(redirected_package_map, fullname):
+ # walk the requested module's ancestor packages to see if any have been previously redirected
+ cur_pkg = fullname
+ while cur_pkg:
+ cur_pkg = cur_pkg.rpartition('.')[0]
+ ancestor_redirect = redirected_package_map.get(cur_pkg)
+ if ancestor_redirect:
+ # rewrite the prefix on fullname so we import the target first, then alias it
+ redirect = ancestor_redirect + fullname[len(cur_pkg):]
+ return redirect
+ return None
+
+
+def _nested_dict_get(root_dict, key_list):
+ cur_value = root_dict
+ for key in key_list:
+ cur_value = cur_value.get(key)
+ if not cur_value:
+ return None
+
+ return cur_value
+
+
+def _iter_modules_impl(paths, prefix=''):
+ # NB: this currently only iterates what's on disk- redirected modules are not considered
+ if not prefix:
+ prefix = ''
+ else:
+ prefix = to_native(prefix)
+ # yield (module_loader, name, ispkg) for each module/pkg under path
+ # TODO: implement ignore/silent catch for unreadable?
+ for b_path in map(to_bytes, paths):
+ if not os.path.isdir(b_path):
+ continue
+ for b_basename in sorted(os.listdir(b_path)):
+ b_candidate_module_path = os.path.join(b_path, b_basename)
+ if os.path.isdir(b_candidate_module_path):
+ # exclude things that obviously aren't Python package dirs
+ # FIXME: this dir is adjustable in py3.8+, check for it
+ if b'.' in b_basename or b_basename == b'__pycache__':
+ continue
+
+ # TODO: proper string handling?
+ yield prefix + to_native(b_basename), True
+ else:
+ # FIXME: match builtin ordering for package/dir/file, support compiled?
+ if b_basename.endswith(b'.py') and b_basename != b'__init__.py':
+ yield prefix + to_native(os.path.splitext(b_basename)[0]), False
+
+
+def _get_collection_metadata(collection_name):
+ collection_name = to_native(collection_name)
+ if not collection_name or not isinstance(collection_name, string_types) or len(collection_name.split('.')) != 2:
+ raise ValueError('collection_name must be a non-empty string of the form namespace.collection')
+
+ try:
+ collection_pkg = import_module('ansible_collections.' + collection_name)
+ except ImportError:
+ raise ValueError('unable to locate collection {0}'.format(collection_name))
+
+ _collection_meta = getattr(collection_pkg, '_collection_meta', None)
+
+ if _collection_meta is None:
+ raise ValueError('collection metadata was not loaded for collection {0}'.format(collection_name))
+
+ return _collection_meta
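
Reviewer note: a minimal usage sketch of the validation helpers above (editorial, not part of the diff; assumes an ansible-core install where AnsibleCollectionRef is re-exported from ansible.utils.collection_loader):

from ansible.utils.collection_loader import AnsibleCollectionRef

# exactly one dot, and both parts must be non-keyword Python identifiers
AnsibleCollectionRef.is_valid_collection_name('community.general')   # True
AnsibleCollectionRef.is_valid_collection_name('class.general')       # False: 'class' is a keyword

# FQCR checks; passing ref_type adds deeper validation via try_parse_fqcr()
AnsibleCollectionRef.is_valid_fqcr('ns.coll.my_module')                         # True
AnsibleCollectionRef.is_valid_fqcr('ns.coll.subdir1.my_role', ref_type='role')  # True
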
diff --git a/lib/ansible/utils/collection_loader/_collection_meta.py b/lib/ansible/utils/collection_loader/_collection_meta.py
new file mode 100644
index 0000000..deaac8e
--- /dev/null
+++ b/lib/ansible/utils/collection_loader/_collection_meta.py
@@ -0,0 +1,32 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# CAUTION: This implementation of the collection loader is used by ansible-test.
+# Because of this, it must be compatible with all Python versions supported on the controller or remote.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+try:
+ from collections.abc import Mapping
+except ImportError:
+ from collections import Mapping # type: ignore[no-redef,attr-defined] # pylint: disable=ansible-bad-import-from
+
+from ansible.module_utils.common.yaml import yaml_load
+
+
+def _meta_yml_to_dict(yaml_string_data, content_id):
+ """
+ Converts a YAML dictionary string to a Python dictionary. This function may be monkeypatched with another implementation
+ by some tools (eg the import sanity test).
+ :param yaml_string_data: a bytes-ish YAML dictionary
+ :param content_id: a unique ID representing the content to allow other implementations to cache the output
+ :return: a Python dictionary representing the YAML dictionary content
+ """
+ # NB: content_id is passed in, but not used by this implementation
+ routing_dict = yaml_load(yaml_string_data)
+ if not routing_dict:
+ routing_dict = {}
+ if not isinstance(routing_dict, Mapping):
+ raise ValueError('collection metadata must be an instance of Python Mapping')
+ return routing_dict
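
A quick behavioral sketch of _meta_yml_to_dict (editorial, not part of the diff):

from ansible.utils.collection_loader._collection_meta import _meta_yml_to_dict

# content_id exists so alternate implementations can cache; it is unused here
_meta_yml_to_dict(b'requires_ansible: ">=2.9.10"\n', content_id='ns.coll')
# -> {'requires_ansible': '>=2.9.10'}
_meta_yml_to_dict(b'', content_id='ns.empty')   # -> {} (falsey YAML normalizes to an empty dict)
_meta_yml_to_dict(b'- not a mapping', 'x')      # raises ValueError
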
diff --git a/lib/ansible/utils/color.py b/lib/ansible/utils/color.py
new file mode 100644
index 0000000..be8fb00
--- /dev/null
+++ b/lib/ansible/utils/color.py
@@ -0,0 +1,112 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import re
+import sys
+
+from ansible import constants as C
+
+ANSIBLE_COLOR = True
+if C.ANSIBLE_NOCOLOR:
+ ANSIBLE_COLOR = False
+elif not hasattr(sys.stdout, 'isatty') or not sys.stdout.isatty():
+ ANSIBLE_COLOR = False
+else:
+ try:
+ import curses
+ curses.setupterm()
+ if curses.tigetnum('colors') < 0:
+ ANSIBLE_COLOR = False
+ except ImportError:
+ # curses library was not found
+ pass
+ except curses.error:
+ # curses returns an error (e.g. could not find terminal)
+ ANSIBLE_COLOR = False
+
+if C.ANSIBLE_FORCE_COLOR:
+ ANSIBLE_COLOR = True
+
+# --- begin "pretty"
+#
+# pretty - A miniature library that provides a Python print and stdout
+# wrapper that makes colored terminal text easier to use (e.g. without
+# having to mess around with ANSI escape sequences). This code is public
+# domain - there is no license except that you must leave this header.
+#
+# Copyright (C) 2008 Brian Nez <thedude at bri1 dot com>
+
+
+def parsecolor(color):
+ """SGR parameter string for the specified color name."""
+ matches = re.match(r"color(?P<color>[0-9]+)"
+ r"|(?P<rgb>rgb(?P<red>[0-5])(?P<green>[0-5])(?P<blue>[0-5]))"
+ r"|gray(?P<gray>[0-9]+)", color)
+ if not matches:
+ return C.COLOR_CODES[color]
+ if matches.group('color'):
+ return u'38;5;%d' % int(matches.group('color'))
+ if matches.group('rgb'):
+ return u'38;5;%d' % (16 + 36 * int(matches.group('red')) +
+ 6 * int(matches.group('green')) +
+ int(matches.group('blue')))
+ if matches.group('gray'):
+ return u'38;5;%d' % (232 + int(matches.group('gray')))
+
+
+def stringc(text, color, wrap_nonvisible_chars=False):
+ """String in color."""
+
+ if ANSIBLE_COLOR:
+ color_code = parsecolor(color)
+ fmt = u"\033[%sm%s\033[0m"
+ if wrap_nonvisible_chars:
+ # This option is provided for use in cases when the
+ # formatting of a command line prompt is needed, such as
+ # `ansible-console`. As said in `readline` sources:
+ # readline/display.c:321
+ # /* Current implementation:
+ # \001 (^A) start non-visible characters
+ # \002 (^B) end non-visible characters
+ # all characters except \001 and \002 (following a \001) are copied to
+ # the returned string; all characters except those between \001 and
+ # \002 are assumed to be `visible'. */
+ fmt = u"\001\033[%sm\002%s\001\033[0m\002"
+ return u"\n".join([fmt % (color_code, t) for t in text.split(u'\n')])
+ else:
+ return text
+
+
+def colorize(lead, num, color):
+ """ Print 'lead' = 'num' in 'color' """
+ s = u"%s=%-4s" % (lead, str(num))
+ if num != 0 and ANSIBLE_COLOR and color is not None:
+ s = stringc(s, color)
+ return s
+
+
+def hostcolor(host, stats, color=True):
+ if ANSIBLE_COLOR and color:
+ if stats['failures'] != 0 or stats['unreachable'] != 0:
+ return u"%-37s" % stringc(host, C.COLOR_ERROR)
+ elif stats['changed'] != 0:
+ return u"%-37s" % stringc(host, C.COLOR_CHANGED)
+ else:
+ return u"%-37s" % stringc(host, C.COLOR_OK)
+ return u"%-26s" % host
diff --git a/lib/ansible/utils/context_objects.py b/lib/ansible/utils/context_objects.py
new file mode 100644
index 0000000..efe15fe
--- /dev/null
+++ b/lib/ansible/utils/context_objects.py
@@ -0,0 +1,92 @@
+# Copyright: (c) 2018, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+"""
+Hold command line arguments for use in other modules
+"""
+
+from abc import ABCMeta
+from collections.abc import Container, Mapping, Sequence, Set
+
+from ansible.module_utils.common.collections import ImmutableDict
+from ansible.module_utils.six import add_metaclass, binary_type, text_type
+from ansible.utils.singleton import Singleton
+
+
+def _make_immutable(obj):
+ """Recursively convert a container and objects inside of it into immutable data types"""
+ if isinstance(obj, (text_type, binary_type)):
+ # Strings first because they are also sequences
+ return obj
+ elif isinstance(obj, Mapping):
+ temp_dict = {}
+ for key, value in obj.items():
+ if isinstance(value, Container):
+ temp_dict[key] = _make_immutable(value)
+ else:
+ temp_dict[key] = value
+ return ImmutableDict(temp_dict)
+ elif isinstance(obj, Set):
+ temp_set = set()
+ for value in obj:
+ if isinstance(value, Container):
+ temp_set.add(_make_immutable(value))
+ else:
+ temp_set.add(value)
+ return frozenset(temp_set)
+ elif isinstance(obj, Sequence):
+ temp_sequence = []
+ for value in obj:
+ if isinstance(value, Container):
+ temp_sequence.append(_make_immutable(value))
+ else:
+ temp_sequence.append(value)
+ return tuple(temp_sequence)
+
+ return obj
+
+
+class _ABCSingleton(Singleton, ABCMeta):
+ """
+ Combine ABCMeta based classes with Singleton based classes
+
+ Combine Singleton and ABCMeta so we have a metaclass that unambiguously knows which can override
+ the other. Useful for making new types of containers which are also Singletons.
+ """
+ pass
+
+
+class CLIArgs(ImmutableDict):
+ """
+ Hold a parsed copy of cli arguments
+
+ We have both this non-Singleton version and the Singleton, GlobalCLIArgs, version to leave us
+ room to implement a Context object in the future. Whereas there should only be one set of args
+ in a global context, individual Context objects might want to pretend that they have different
+ command line switches to trigger different behaviour when they run. So if we support Contexts
+ in the future, they would use CLIArgs instead of GlobalCLIArgs to store their version of command
+ line flags.
+ """
+ def __init__(self, mapping):
+ toplevel = {}
+ for key, value in mapping.items():
+ toplevel[key] = _make_immutable(value)
+ super(CLIArgs, self).__init__(toplevel)
+
+ @classmethod
+ def from_options(cls, options):
+ return cls(vars(options))
+
+
+@add_metaclass(_ABCSingleton)
+class GlobalCLIArgs(CLIArgs):
+ """
+ Globally hold a parsed copy of cli arguments.
+
+ Only one of these exist per program as it is for global context
+ """
+ pass
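
To illustrate the recursive freezing, an editorial sketch (argument names here are illustrative only):

from argparse import Namespace
from ansible.utils.context_objects import CLIArgs, GlobalCLIArgs

args = CLIArgs.from_options(Namespace(check=True, tags=['a', 'b'], extra={'k': [1, 2]}))
args['tags']         # ('a', 'b') -- Sequences become tuples
args['extra']['k']   # (1, 2)     -- nested containers are frozen recursively

# GlobalCLIArgs is a Singleton: constructing it again returns the first instance
assert GlobalCLIArgs.from_options(Namespace(v=1)) is GlobalCLIArgs.from_options(Namespace(v=2))
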
diff --git a/lib/ansible/utils/display.py b/lib/ansible/utils/display.py
new file mode 100644
index 0000000..e521f2a
--- /dev/null
+++ b/lib/ansible/utils/display.py
@@ -0,0 +1,526 @@
+# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import ctypes.util
+import fcntl
+import getpass
+import logging
+import os
+import random
+import subprocess
+import sys
+import textwrap
+import threading
+import time
+
+from struct import unpack, pack
+from termios import TIOCGWINSZ
+
+from ansible import constants as C
+from ansible.errors import AnsibleError, AnsibleAssertionError
+from ansible.module_utils._text import to_bytes, to_text
+from ansible.module_utils.six import text_type
+from ansible.utils.color import stringc
+from ansible.utils.multiprocessing import context as multiprocessing_context
+from ansible.utils.singleton import Singleton
+from ansible.utils.unsafe_proxy import wrap_var
+from functools import wraps
+
+
+_LIBC = ctypes.cdll.LoadLibrary(ctypes.util.find_library('c'))
+# Set argtypes, to avoid segfault if the wrong type is provided,
+# restype is assumed to be c_int
+_LIBC.wcwidth.argtypes = (ctypes.c_wchar,)
+_LIBC.wcswidth.argtypes = (ctypes.c_wchar_p, ctypes.c_int)
+# Max for c_int
+_MAX_INT = 2 ** (ctypes.sizeof(ctypes.c_int) * 8 - 1) - 1
+
+
+def get_text_width(text):
+ """Function that utilizes ``wcswidth`` or ``wcwidth`` to determine the
+ number of columns used to display a text string.
+
+ We try first with ``wcswidth``, and fall back to iterating each
+ character and using ``wcwidth`` individually, substituting a value of 0
+ for non-printable wide characters.
+ """
+ if not isinstance(text, text_type):
+ raise TypeError('get_text_width requires text, not %s' % type(text))
+
+ try:
+ width = _LIBC.wcswidth(text, _MAX_INT)
+ except ctypes.ArgumentError:
+ width = -1
+ if width != -1:
+ return width
+
+ width = 0
+ counter = 0
+ for c in text:
+ counter += 1
+ if c in (u'\x08', u'\x7f', u'\x94', u'\x1b'):
+ # A few characters result in a subtraction of length:
+ # BS, DEL, CCH, ESC
+ # ESC is slightly different: while ESC itself is non-printable, it is
+ # part of an escape sequence, which results in a single non-printable length
+ width -= 1
+ counter -= 1
+ continue
+
+ try:
+ w = _LIBC.wcwidth(c)
+ except ctypes.ArgumentError:
+ w = -1
+ if w == -1:
+ # -1 signifies a non-printable character
+ # use 0 here as a best effort
+ w = 0
+ width += w
+
+ if width == 0 and counter:
+ raise EnvironmentError(
+ 'get_text_width could not calculate text width of %r' % text
+ )
+
+ # It doesn't make sense to have a negative printable width
+ return width if width >= 0 else 0
+
+
+class FilterBlackList(logging.Filter):
+ def __init__(self, blacklist):
+ self.blacklist = [logging.Filter(name) for name in blacklist]
+
+ def filter(self, record):
+ return not any(f.filter(record) for f in self.blacklist)
+
+
+class FilterUserInjector(logging.Filter):
+ """
+ This is a filter which injects the current user as the 'user' attribute on each record. We need to add this filter
+ to all logger handlers so that 3rd party libraries won't print an exception due to user not being defined.
+ """
+
+ try:
+ username = getpass.getuser()
+ except KeyError:
+ # people like to make containers w/o actual valid passwd/shadow and use host uids
+ username = 'uid=%s' % os.getuid()
+
+ def filter(self, record):
+ record.user = FilterUserInjector.username
+ return True
+
+
+logger = None
+# TODO: make this a callback event instead
+if getattr(C, 'DEFAULT_LOG_PATH'):
+ path = C.DEFAULT_LOG_PATH
+ if path and ((os.path.exists(path) and os.access(path, os.W_OK)) or os.access(os.path.dirname(path), os.W_OK)):
+ # NOTE: level is kept at INFO to avoid security disclosures caused by certain libraries when using DEBUG
+ logging.basicConfig(filename=path, level=logging.INFO, # DO NOT set to logging.DEBUG
+ format='%(asctime)s p=%(process)d u=%(user)s n=%(name)s | %(message)s')
+
+ logger = logging.getLogger('ansible')
+ for handler in logging.root.handlers:
+ handler.addFilter(FilterBlackList(getattr(C, 'DEFAULT_LOG_FILTER', [])))
+ handler.addFilter(FilterUserInjector())
+ else:
+ print("[WARNING]: log file at %s is not writeable and we cannot create it, aborting\n" % path, file=sys.stderr)
+
+# map color to log levels
+color_to_log_level = {C.COLOR_ERROR: logging.ERROR,
+ C.COLOR_WARN: logging.WARNING,
+ C.COLOR_OK: logging.INFO,
+ C.COLOR_SKIP: logging.WARNING,
+ C.COLOR_UNREACHABLE: logging.ERROR,
+ C.COLOR_DEBUG: logging.DEBUG,
+ C.COLOR_CHANGED: logging.INFO,
+ C.COLOR_DEPRECATE: logging.WARNING,
+ C.COLOR_VERBOSE: logging.INFO}
+
+b_COW_PATHS = (
+ b"/usr/bin/cowsay",
+ b"/usr/games/cowsay",
+ b"/usr/local/bin/cowsay", # BSD path for cowsay
+ b"/opt/local/bin/cowsay", # MacPorts path for cowsay
+)
+
+
+def _synchronize_textiowrapper(tio, lock):
+ # Ensure that a background thread can't hold the internal buffer lock on a file object
+ # during a fork, which causes forked children to hang. We're using display's existing lock for
+ # convenience (and entering the lock before a fork).
+ def _wrap_with_lock(f, lock):
+ @wraps(f)
+ def locking_wrapper(*args, **kwargs):
+ with lock:
+ return f(*args, **kwargs)
+
+ return locking_wrapper
+
+ buffer = tio.buffer
+
+ # monkeypatching the underlying file-like object isn't great, but likely safer than subclassing
+ buffer.write = _wrap_with_lock(buffer.write, lock)
+ buffer.flush = _wrap_with_lock(buffer.flush, lock)
+
+
+class Display(metaclass=Singleton):
+
+ def __init__(self, verbosity=0):
+
+ self._final_q = None
+
+ # NB: this lock is used to both prevent intermingled output between threads and to block writes during forks.
+ # Do not change the type of this lock or upgrade to a shared lock (eg multiprocessing.RLock).
+ self._lock = threading.RLock()
+
+ self.columns = None
+ self.verbosity = verbosity
+
+ # list of all deprecation messages to prevent duplicate display
+ self._deprecations = {}
+ self._warns = {}
+ self._errors = {}
+
+ self.b_cowsay = None
+ self.noncow = C.ANSIBLE_COW_SELECTION
+
+ self.set_cowsay_info()
+
+ if self.b_cowsay:
+ try:
+ cmd = subprocess.Popen([self.b_cowsay, "-l"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (out, err) = cmd.communicate()
+ if cmd.returncode:
+ raise Exception
+ self.cows_available = {to_text(c) for c in out.split()} # set comprehension
+ if C.ANSIBLE_COW_ACCEPTLIST and any(C.ANSIBLE_COW_ACCEPTLIST):
+ self.cows_available = set(C.ANSIBLE_COW_ACCEPTLIST).intersection(self.cows_available)
+ except Exception:
+ # could not execute cowsay for some reason
+ self.b_cowsay = False
+
+ self._set_column_width()
+
+ try:
+ # NB: we're relying on the display singleton behavior to ensure this only runs once
+ _synchronize_textiowrapper(sys.stdout, self._lock)
+ _synchronize_textiowrapper(sys.stderr, self._lock)
+ except Exception as ex:
+ self.warning(f"failed to patch stdout/stderr for fork-safety: {ex}")
+
+ def set_queue(self, queue):
+ """Set the _final_q on Display, so that we know to proxy display over the queue
+ instead of directly writing to stdout/stderr from forks
+
+ This is only needed in ansible.executor.process.worker:WorkerProcess._run
+ """
+ if multiprocessing_context.parent_process() is None:
+ raise RuntimeError('queue cannot be set in parent process')
+ self._final_q = queue
+
+ def set_cowsay_info(self):
+ if C.ANSIBLE_NOCOWS:
+ return
+
+ if C.ANSIBLE_COW_PATH:
+ self.b_cowsay = C.ANSIBLE_COW_PATH
+ else:
+ for b_cow_path in b_COW_PATHS:
+ if os.path.exists(b_cow_path):
+ self.b_cowsay = b_cow_path
+
+ def display(self, msg, color=None, stderr=False, screen_only=False, log_only=False, newline=True):
+ """ Display a message to the user
+
+ Note: msg *must* be a unicode string to prevent UnicodeError tracebacks.
+ """
+
+ if self._final_q:
+ # If _final_q is set, that means we are in a WorkerProcess
+ # and instead of displaying messages directly from the fork
+ # we will proxy them through the queue
+ return self._final_q.send_display(msg, color=color, stderr=stderr,
+ screen_only=screen_only, log_only=log_only, newline=newline)
+
+ nocolor = msg
+
+ if not log_only:
+
+ has_newline = msg.endswith(u'\n')
+ if has_newline:
+ msg2 = msg[:-1]
+ else:
+ msg2 = msg
+
+ if color:
+ msg2 = stringc(msg2, color)
+
+ if has_newline or newline:
+ msg2 = msg2 + u'\n'
+
+ # Note: After Display() class is refactored need to update the log capture
+ # code in 'bin/ansible-connection' (and other relevant places).
+ if not stderr:
+ fileobj = sys.stdout
+ else:
+ fileobj = sys.stderr
+
+ with self._lock:
+ fileobj.write(msg2)
+
+ # With locks, and since we aren't printing from forks,
+ # just write and let the system flush. Everything should come out peachy.
+ # I've left this code for historical purposes, or in case we need to add it
+ # back at a later date. For now ``TaskQueueManager.cleanup`` will perform a
+ # final flush at shutdown.
+ # try:
+ # fileobj.flush()
+ # except IOError as e:
+ # # Ignore EPIPE in case fileobj has been prematurely closed, eg.
+ # # when piping to "head -n1"
+ # if e.errno != errno.EPIPE:
+ # raise
+
+ if logger and not screen_only:
+ msg2 = nocolor.lstrip('\n')
+
+ lvl = logging.INFO
+ if color:
+ # set logger level based on color (not great)
+ try:
+ lvl = color_to_log_level[color]
+ except KeyError:
+ # this should not happen, but JIC
+ raise AnsibleAssertionError('Invalid color supplied to display: %s' % color)
+ # actually log
+ logger.log(lvl, msg2)
+
+ def v(self, msg, host=None):
+ return self.verbose(msg, host=host, caplevel=0)
+
+ def vv(self, msg, host=None):
+ return self.verbose(msg, host=host, caplevel=1)
+
+ def vvv(self, msg, host=None):
+ return self.verbose(msg, host=host, caplevel=2)
+
+ def vvvv(self, msg, host=None):
+ return self.verbose(msg, host=host, caplevel=3)
+
+ def vvvvv(self, msg, host=None):
+ return self.verbose(msg, host=host, caplevel=4)
+
+ def vvvvvv(self, msg, host=None):
+ return self.verbose(msg, host=host, caplevel=5)
+
+ def debug(self, msg, host=None):
+ if C.DEFAULT_DEBUG:
+ if host is None:
+ self.display("%6d %0.5f: %s" % (os.getpid(), time.time(), msg), color=C.COLOR_DEBUG)
+ else:
+ self.display("%6d %0.5f [%s]: %s" % (os.getpid(), time.time(), host, msg), color=C.COLOR_DEBUG)
+
+ def verbose(self, msg, host=None, caplevel=2):
+
+ to_stderr = C.VERBOSE_TO_STDERR
+ if self.verbosity > caplevel:
+ if host is None:
+ self.display(msg, color=C.COLOR_VERBOSE, stderr=to_stderr)
+ else:
+ self.display("<%s> %s" % (host, msg), color=C.COLOR_VERBOSE, stderr=to_stderr)
+
+ def get_deprecation_message(self, msg, version=None, removed=False, date=None, collection_name=None):
+ ''' Builds and returns a formatted deprecation message. '''
+ msg = msg.strip()
+ if msg and msg[-1] not in ['!', '?', '.']:
+ msg += '.'
+
+ if collection_name == 'ansible.builtin':
+ collection_name = 'ansible-core'
+
+ if removed:
+ header = '[DEPRECATED]: {0}'.format(msg)
+ removal_fragment = 'This feature was removed'
+ help_text = 'Please update your playbooks.'
+ else:
+ header = '[DEPRECATION WARNING]: {0}'.format(msg)
+ removal_fragment = 'This feature will be removed'
+ # FUTURE: make this a standalone warning so it only shows up once?
+ help_text = 'Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.'
+
+ if collection_name:
+ from_fragment = 'from {0}'.format(collection_name)
+ else:
+ from_fragment = ''
+
+ if date:
+ when = 'in a release after {0}.'.format(date)
+ elif version:
+ when = 'in version {0}.'.format(version)
+ else:
+ when = 'in a future release.'
+
+ message_text = ' '.join(f for f in [header, removal_fragment, from_fragment, when, help_text] if f)
+
+ return message_text
+
+ def deprecated(self, msg, version=None, removed=False, date=None, collection_name=None):
+ if not removed and not C.DEPRECATION_WARNINGS:
+ return
+
+ message_text = self.get_deprecation_message(msg, version=version, removed=removed, date=date, collection_name=collection_name)
+
+ if removed:
+ raise AnsibleError(message_text)
+
+ wrapped = textwrap.wrap(message_text, self.columns, drop_whitespace=False)
+ message_text = "\n".join(wrapped) + "\n"
+
+ if message_text not in self._deprecations:
+ self.display(message_text.strip(), color=C.COLOR_DEPRECATE, stderr=True)
+ self._deprecations[message_text] = 1
+
+ def warning(self, msg, formatted=False):
+
+ if not formatted:
+ new_msg = "[WARNING]: %s" % msg
+ wrapped = textwrap.wrap(new_msg, self.columns)
+ new_msg = "\n".join(wrapped) + "\n"
+ else:
+ new_msg = "\n[WARNING]: \n%s" % msg
+
+ if new_msg not in self._warns:
+ self.display(new_msg, color=C.COLOR_WARN, stderr=True)
+ self._warns[new_msg] = 1
+
+ def system_warning(self, msg):
+ if C.SYSTEM_WARNINGS:
+ self.warning(msg)
+
+ def banner(self, msg, color=None, cows=True):
+ '''
+ Prints a header-looking line, using cowsay if available, otherwise padded with stars whose length depends on the terminal width (3 minimum)
+ '''
+ msg = to_text(msg)
+
+ if self.b_cowsay and cows:
+ try:
+ self.banner_cowsay(msg)
+ return
+ except OSError:
+ self.warning("somebody cleverly deleted cowsay or something during the PB run. heh.")
+
+ msg = msg.strip()
+ try:
+ star_len = self.columns - get_text_width(msg)
+ except EnvironmentError:
+ star_len = self.columns - len(msg)
+ if star_len <= 3:
+ star_len = 3
+ stars = u"*" * star_len
+ self.display(u"\n%s %s" % (msg, stars), color=color)
+
+ def banner_cowsay(self, msg, color=None):
+ if u": [" in msg:
+ msg = msg.replace(u"[", u"")
+ if msg.endswith(u"]"):
+ msg = msg[:-1]
+ runcmd = [self.b_cowsay, b"-W", b"60"]
+ if self.noncow:
+ thecow = self.noncow
+ if thecow == 'random':
+ thecow = random.choice(list(self.cows_available))
+ runcmd.append(b'-f')
+ runcmd.append(to_bytes(thecow))
+ runcmd.append(to_bytes(msg))
+ cmd = subprocess.Popen(runcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (out, err) = cmd.communicate()
+ self.display(u"%s\n" % to_text(out), color=color)
+
+ def error(self, msg, wrap_text=True):
+ if wrap_text:
+ new_msg = u"\n[ERROR]: %s" % msg
+ wrapped = textwrap.wrap(new_msg, self.columns)
+ new_msg = u"\n".join(wrapped) + u"\n"
+ else:
+ new_msg = u"ERROR! %s" % msg
+ if new_msg not in self._errors:
+ self.display(new_msg, color=C.COLOR_ERROR, stderr=True)
+ self._errors[new_msg] = 1
+
+ @staticmethod
+ def prompt(msg, private=False):
+ if private:
+ return getpass.getpass(msg)
+ else:
+ return input(msg)
+
+ def do_var_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None, unsafe=None):
+
+ result = None
+ if sys.__stdin__.isatty():
+
+ do_prompt = self.prompt
+
+ if prompt and default is not None:
+ msg = "%s [%s]: " % (prompt, default)
+ elif prompt:
+ msg = "%s: " % prompt
+ else:
+ msg = 'input for %s: ' % varname
+
+ if confirm:
+ while True:
+ result = do_prompt(msg, private)
+ second = do_prompt("confirm " + msg, private)
+ if result == second:
+ break
+ self.display("***** VALUES ENTERED DO NOT MATCH ****")
+ else:
+ result = do_prompt(msg, private)
+ else:
+ result = None
+ self.warning("Not prompting as we are not in interactive mode")
+
+ # if result is false and default is not None
+ if not result and default is not None:
+ result = default
+
+ if encrypt:
+ # Circular import because encrypt needs a display class
+ from ansible.utils.encrypt import do_encrypt
+ result = do_encrypt(result, encrypt, salt_size, salt)
+
+ # handle utf-8 chars
+ result = to_text(result, errors='surrogate_or_strict')
+
+ if unsafe:
+ result = wrap_var(result)
+ return result
+
+ def _set_column_width(self):
+ if os.isatty(1):
+ tty_size = unpack('HHHH', fcntl.ioctl(1, TIOCGWINSZ, pack('HHHH', 0, 0, 0, 0)))[1]
+ else:
+ tty_size = 0
+ self.columns = max(79, tty_size - 1)
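
An editorial sketch of the verbosity gating in Display (a process-wide singleton, so the first construction's verbosity wins):

from ansible.utils.display import Display

display = Display(verbosity=2)
display.display('always shown')
display.v('shown: caplevel 0 < verbosity 2')
display.vv('shown: caplevel 1 < verbosity 2')
display.vvv('suppressed: caplevel 2 is not < verbosity 2')
display.warning('printed once; identical warnings are deduplicated')
display.banner('PLAY [demo]')   # stars padded to the detected terminal width
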
diff --git a/lib/ansible/utils/encrypt.py b/lib/ansible/utils/encrypt.py
new file mode 100644
index 0000000..3a8642d
--- /dev/null
+++ b/lib/ansible/utils/encrypt.py
@@ -0,0 +1,272 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+# (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import multiprocessing
+import random
+import re
+import string
+import sys
+
+from collections import namedtuple
+
+from ansible import constants as C
+from ansible.errors import AnsibleError, AnsibleAssertionError
+from ansible.module_utils.six import text_type
+from ansible.module_utils._text import to_text, to_bytes
+from ansible.utils.display import Display
+
+PASSLIB_E = CRYPT_E = None
+HAS_CRYPT = PASSLIB_AVAILABLE = False
+try:
+ import passlib
+ import passlib.hash
+ from passlib.utils.handlers import HasRawSalt, PrefixWrapper
+ try:
+ from passlib.utils.binary import bcrypt64
+ except ImportError:
+ from passlib.utils import bcrypt64
+ PASSLIB_AVAILABLE = True
+except Exception as e:
+ PASSLIB_E = e
+
+try:
+ import crypt
+ HAS_CRYPT = True
+except Exception as e:
+ CRYPT_E = e
+
+
+display = Display()
+
+__all__ = ['do_encrypt']
+
+_LOCK = multiprocessing.Lock()
+
+DEFAULT_PASSWORD_LENGTH = 20
+
+
+def random_password(length=DEFAULT_PASSWORD_LENGTH, chars=C.DEFAULT_PASSWORD_CHARS, seed=None):
+ '''Return a random password string of the given length containing only the given chars
+
+ :kwarg length: The number of characters in the new password. Defaults to 20.
+ :kwarg chars: The characters to choose from. The default is all ascii
+ letters, ascii digits, and these symbols ``.,:-_``
+ :kwarg seed: An optional seed; when set, a seeded ``random.Random`` is used
+ instead of ``random.SystemRandom``, making the result reproducible.
+ '''
+ if not isinstance(chars, text_type):
+ raise AnsibleAssertionError('%s (%s) is not a text_type' % (chars, type(chars)))
+
+ if seed is None:
+ random_generator = random.SystemRandom()
+ else:
+ random_generator = random.Random(seed)
+ return u''.join(random_generator.choice(chars) for dummy in range(length))
+
+
+def random_salt(length=8):
+ """Return a text string suitable for use as a salt for the hash functions we use to encrypt passwords.
+ """
+ # Note passlib salt values must be pure ascii so we can't let the user
+ # configure this
+ salt_chars = string.ascii_letters + string.digits + u'./'
+ return random_password(length=length, chars=salt_chars)
+
+
+class BaseHash(object):
+ algo = namedtuple('algo', ['crypt_id', 'salt_size', 'implicit_rounds', 'salt_exact', 'implicit_ident'])
+ algorithms = {
+ 'md5_crypt': algo(crypt_id='1', salt_size=8, implicit_rounds=None, salt_exact=False, implicit_ident=None),
+ 'bcrypt': algo(crypt_id='2b', salt_size=22, implicit_rounds=12, salt_exact=True, implicit_ident='2b'),
+ 'sha256_crypt': algo(crypt_id='5', salt_size=16, implicit_rounds=535000, salt_exact=False, implicit_ident=None),
+ 'sha512_crypt': algo(crypt_id='6', salt_size=16, implicit_rounds=656000, salt_exact=False, implicit_ident=None),
+ }
+
+ def __init__(self, algorithm):
+ self.algorithm = algorithm
+
+
+class CryptHash(BaseHash):
+ def __init__(self, algorithm):
+ super(CryptHash, self).__init__(algorithm)
+
+ if not HAS_CRYPT:
+ raise AnsibleError("crypt.crypt cannot be used as the 'crypt' python library is not installed or is unusable.", orig_exc=CRYPT_E)
+
+ if sys.platform.startswith('darwin'):
+ raise AnsibleError("crypt.crypt not supported on Mac OS X/Darwin, install passlib python module")
+
+ if algorithm not in self.algorithms:
+ raise AnsibleError("crypt.crypt does not support '%s' algorithm" % self.algorithm)
+
+ display.deprecated(
+ "Encryption using the Python crypt module is deprecated. The "
+ "Python crypt module is deprecated and will be removed from "
+ "Python 3.13. Install the passlib library for continued "
+ "encryption functionality.",
+ version='2.17'
+ )
+
+ self.algo_data = self.algorithms[algorithm]
+
+ def hash(self, secret, salt=None, salt_size=None, rounds=None, ident=None):
+ salt = self._salt(salt, salt_size)
+ rounds = self._rounds(rounds)
+ ident = self._ident(ident)
+ return self._hash(secret, salt, rounds, ident)
+
+ def _salt(self, salt, salt_size):
+ salt_size = salt_size or self.algo_data.salt_size
+ ret = salt or random_salt(salt_size)
+ if re.search(r'[^./0-9A-Za-z]', ret):
+ raise AnsibleError("invalid characters in salt")
+ if self.algo_data.salt_exact and len(ret) != self.algo_data.salt_size:
+ raise AnsibleError("invalid salt size")
+ elif not self.algo_data.salt_exact and len(ret) > self.algo_data.salt_size:
+ raise AnsibleError("invalid salt size")
+ return ret
+
+ def _rounds(self, rounds):
+ if rounds == self.algo_data.implicit_rounds:
+ # Passlib does not include the rounds if it is the same as implicit_rounds.
+ # Make crypt lib behave the same, by not explicitly specifying the rounds in that case.
+ return None
+ else:
+ return rounds
+
+ def _ident(self, ident):
+ if not ident:
+ return self.algo_data.crypt_id
+ if self.algorithm == 'bcrypt':
+ return ident
+ return None
+
+ def _hash(self, secret, salt, rounds, ident):
+ saltstring = ""
+ if ident:
+ saltstring = "$%s" % ident
+
+ if rounds:
+ saltstring += "$rounds=%d" % rounds
+
+ saltstring += "$%s" % salt
+
+ # crypt.crypt on Python < 3.9 returns None if it cannot parse saltstring
+ # On Python >= 3.9, it throws OSError.
+ try:
+ result = crypt.crypt(secret, saltstring)
+ orig_exc = None
+ except OSError as e:
+ result = None
+ orig_exc = e
+
+ # A None result would be interpreted by some modules (e.g. the user module)
+ # as no password at all.
+ if not result:
+ raise AnsibleError(
+ "crypt.crypt does not support '%s' algorithm" % self.algorithm,
+ orig_exc=orig_exc,
+ )
+
+ return result
+
+
+class PasslibHash(BaseHash):
+ def __init__(self, algorithm):
+ super(PasslibHash, self).__init__(algorithm)
+
+ if not PASSLIB_AVAILABLE:
+ raise AnsibleError("passlib must be installed and usable to hash with '%s'" % algorithm, orig_exc=PASSLIB_E)
+
+ try:
+ self.crypt_algo = getattr(passlib.hash, algorithm)
+ except Exception:
+ raise AnsibleError("passlib does not support '%s' algorithm" % algorithm)
+
+ def hash(self, secret, salt=None, salt_size=None, rounds=None, ident=None):
+ salt = self._clean_salt(salt)
+ rounds = self._clean_rounds(rounds)
+ ident = self._clean_ident(ident)
+ return self._hash(secret, salt=salt, salt_size=salt_size, rounds=rounds, ident=ident)
+
+ def _clean_ident(self, ident):
+ ret = None
+ if not ident:
+ if self.algorithm in self.algorithms:
+ return self.algorithms.get(self.algorithm).implicit_ident
+ return ret
+ if self.algorithm == 'bcrypt':
+ return ident
+ return ret
+
+ def _clean_salt(self, salt):
+ if not salt:
+ return None
+ elif issubclass(self.crypt_algo.wrapped if isinstance(self.crypt_algo, PrefixWrapper) else self.crypt_algo, HasRawSalt):
+ ret = to_bytes(salt, encoding='ascii', errors='strict')
+ else:
+ ret = to_text(salt, encoding='ascii', errors='strict')
+
+ # Ensure the salt has the correct padding
+ if self.algorithm == 'bcrypt':
+ ret = bcrypt64.repair_unused(ret)
+
+ return ret
+
+ def _clean_rounds(self, rounds):
+ algo_data = self.algorithms.get(self.algorithm)
+ if rounds:
+ return rounds
+ elif algo_data and algo_data.implicit_rounds:
+ # The default rounds used by passlib depend on the passlib version.
+ # For consistency ensure that passlib behaves the same as crypt in case no rounds were specified.
+ # Thus use the crypt defaults.
+ return algo_data.implicit_rounds
+ else:
+ return None
+
+ def _hash(self, secret, salt, salt_size, rounds, ident):
+ # Not every hash algorithm supports every parameter.
+ # Thus create the settings dict only with set parameters.
+ settings = {}
+ if salt:
+ settings['salt'] = salt
+ if salt_size:
+ settings['salt_size'] = salt_size
+ if rounds:
+ settings['rounds'] = rounds
+ if ident:
+ settings['ident'] = ident
+
+ # starting with passlib 1.7 'using' and 'hash' should be used instead of 'encrypt'
+ if hasattr(self.crypt_algo, 'hash'):
+ result = self.crypt_algo.using(**settings).hash(secret)
+ elif hasattr(self.crypt_algo, 'encrypt'):
+ result = self.crypt_algo.encrypt(secret, **settings)
+ else:
+ raise AnsibleError("installed passlib version %s not supported" % passlib.__version__)
+
+ # passlib.hash should always return something or raise an exception.
+ # Still ensure that there is always a result.
+ # Otherwise an empty password might be assumed by some modules, like the user module.
+ if not result:
+ raise AnsibleError("failed to hash with algorithm '%s'" % self.algorithm)
+
+ # Hashes from passlib.hash should be representable as ascii strings of hex
+ # digits, so this should not traceback. If one is not representable that way,
+ # we want the traceback so such algorithms can be blocked, because they may
+ # impact calling code.
+ return to_text(result, errors='strict')
+
+
+def passlib_or_crypt(secret, algorithm, salt=None, salt_size=None, rounds=None, ident=None):
+ if PASSLIB_AVAILABLE:
+ return PasslibHash(algorithm).hash(secret, salt=salt, salt_size=salt_size, rounds=rounds, ident=ident)
+ if HAS_CRYPT:
+ return CryptHash(algorithm).hash(secret, salt=salt, salt_size=salt_size, rounds=rounds, ident=ident)
+ raise AnsibleError("Unable to encrypt nor hash, either crypt or passlib must be installed.", orig_exc=CRYPT_E)
+
+
+def do_encrypt(result, encrypt, salt_size=None, salt=None, ident=None):
+ return passlib_or_crypt(result, encrypt, salt_size=salt_size, salt=salt, ident=ident)
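
An editorial hashing sketch (requires passlib, or the stdlib crypt module where the platform supports it; the salt below is an illustrative fixed value, omit it for a random one):

from ansible.utils.encrypt import do_encrypt

do_encrypt('hunter2', 'sha512_crypt', salt='fixedExampleSalt')
# -> '$6$rounds=656000$fixedExampleSalt$...' under passlib (implicit rounds pinned to crypt's defaults)
do_encrypt('hunter2', 'bcrypt', ident='2b')   # bcrypt salts must be exactly 22 chars if supplied
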
diff --git a/lib/ansible/utils/fqcn.py b/lib/ansible/utils/fqcn.py
new file mode 100644
index 0000000..a492be1
--- /dev/null
+++ b/lib/ansible/utils/fqcn.py
@@ -0,0 +1,33 @@
+# (c) 2020, Felix Fontein <felix@fontein.de>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+def add_internal_fqcns(names):
+ '''
+ Given a sequence of action/module names, returns a list containing these
+ names along with the same names prefixed with `ansible.builtin.` and
+ `ansible.legacy.` for all names that are not already FQCNs.
+ '''
+ result = []
+ for name in names:
+ result.append(name)
+ if '.' not in name:
+ result.append('ansible.builtin.%s' % name)
+ result.append('ansible.legacy.%s' % name)
+ return result
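
The expansion is easiest to see by example (editorial, not part of the diff):

from ansible.utils.fqcn import add_internal_fqcns

add_internal_fqcns(('command', 'ns.coll.mod'))
# -> ['command', 'ansible.builtin.command', 'ansible.legacy.command', 'ns.coll.mod']
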
diff --git a/lib/ansible/utils/galaxy.py b/lib/ansible/utils/galaxy.py
new file mode 100644
index 0000000..bbb26fb
--- /dev/null
+++ b/lib/ansible/utils/galaxy.py
@@ -0,0 +1,107 @@
+# (c) 2014 Michael DeHaan, <michael@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import tempfile
+from subprocess import Popen, PIPE
+import tarfile
+
+import ansible.constants as C
+from ansible import context
+from ansible.errors import AnsibleError
+from ansible.utils.display import Display
+from ansible.module_utils.common.process import get_bin_path
+from ansible.module_utils.common.text.converters import to_text, to_native
+
+
+display = Display()
+
+
+def scm_archive_collection(src, name=None, version='HEAD'):
+ return scm_archive_resource(src, scm='git', name=name, version=version, keep_scm_meta=False)
+
+
+def scm_archive_resource(src, scm='git', name=None, version='HEAD', keep_scm_meta=False):
+
+ def run_scm_cmd(cmd, tempdir):
+ try:
+ stdout = ''
+ stderr = ''
+ popen = Popen(cmd, cwd=tempdir, stdout=PIPE, stderr=PIPE)
+ stdout, stderr = popen.communicate()
+ except Exception as e:
+ ran = " ".join(cmd)
+ display.debug("ran %s:" % ran)
+ raise AnsibleError("when executing %s: %s" % (ran, to_native(e)))
+ if popen.returncode != 0:
+ raise AnsibleError("- command %s failed in directory %s (rc=%s) - %s" % (' '.join(cmd), tempdir, popen.returncode, to_native(stderr)))
+
+ if scm not in ['hg', 'git']:
+ raise AnsibleError("- scm %s is not currently supported" % scm)
+
+ try:
+ scm_path = get_bin_path(scm)
+ except (ValueError, OSError, IOError):
+ raise AnsibleError("could not find/use %s, it is required to continue with installing %s" % (scm, src))
+
+ tempdir = tempfile.mkdtemp(dir=C.DEFAULT_LOCAL_TMP)
+ clone_cmd = [scm_path, 'clone']
+
+ # Add specific options for ignoring certificates if requested
+ ignore_certs = context.CLIARGS['ignore_certs']
+
+ if ignore_certs:
+ if scm == 'git':
+ clone_cmd.extend(['-c', 'http.sslVerify=false'])
+ elif scm == 'hg':
+ clone_cmd.append('--insecure')
+
+ clone_cmd.extend([src, name])
+
+ run_scm_cmd(clone_cmd, tempdir)
+
+ if scm == 'git' and version:
+ checkout_cmd = [scm_path, 'checkout', to_text(version)]
+ run_scm_cmd(checkout_cmd, os.path.join(tempdir, name))
+
+ temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.tar', dir=C.DEFAULT_LOCAL_TMP)
+ archive_cmd = None
+ if keep_scm_meta:
+ display.vvv('tarring %s from %s to %s' % (name, tempdir, temp_file.name))
+ with tarfile.open(temp_file.name, "w") as tar:
+ tar.add(os.path.join(tempdir, name), arcname=name)
+ elif scm == 'hg':
+ archive_cmd = [scm_path, 'archive', '--prefix', "%s/" % name]
+ if version:
+ archive_cmd.extend(['-r', version])
+ archive_cmd.append(temp_file.name)
+ elif scm == 'git':
+ archive_cmd = [scm_path, 'archive', '--prefix=%s/' % name, '--output=%s' % temp_file.name]
+ if version:
+ archive_cmd.append(version)
+ else:
+ archive_cmd.append('HEAD')
+
+ if archive_cmd is not None:
+ display.vvv('archiving %s' % archive_cmd)
+ run_scm_cmd(archive_cmd, os.path.join(tempdir, name))
+
+ return temp_file.name
diff --git a/lib/ansible/utils/hashing.py b/lib/ansible/utils/hashing.py
new file mode 100644
index 0000000..71300d6
--- /dev/null
+++ b/lib/ansible/utils/hashing.py
@@ -0,0 +1,89 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+
+from hashlib import sha1
+
+try:
+ from hashlib import md5 as _md5
+except ImportError:
+ # Assume we're running in FIPS mode here
+ _md5 = None
+
+from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_bytes
+
+
+def secure_hash_s(data, hash_func=sha1):
+ ''' Return a secure hash hex digest of data. '''
+
+ digest = hash_func()
+ data = to_bytes(data, errors='surrogate_or_strict')
+ digest.update(data)
+ return digest.hexdigest()
+
+
+def secure_hash(filename, hash_func=sha1):
+ ''' Return a secure hash hex digest of local file, None if file is not present or a directory. '''
+
+ if not os.path.exists(to_bytes(filename, errors='surrogate_or_strict')) or os.path.isdir(to_bytes(filename, errors='strict')):
+ return None
+ digest = hash_func()
+ blocksize = 64 * 1024
+ try:
+ infile = open(to_bytes(filename, errors='surrogate_or_strict'), 'rb')
+ block = infile.read(blocksize)
+ while block:
+ digest.update(block)
+ block = infile.read(blocksize)
+ infile.close()
+ except IOError as e:
+ raise AnsibleError("error while accessing the file %s, error was: %s" % (filename, e))
+ return digest.hexdigest()
+
+
+# The checksum algorithm must match with the algorithm in ShellModule.checksum() method
+checksum = secure_hash
+checksum_s = secure_hash_s
+
+
+#
+# Backwards compat functions. Some modules include md5s in their return values
+# Continue to support that for now. As of ansible-1.8, all of those modules
+# should also return "checksum" (sha1 for now)
+# Do not use md5 unless it is needed for:
+# 1) Optional backwards compatibility
+# 2) Compliance with a third party protocol
+#
+# MD5 will not work on systems which are FIPS-140-2 compliant.
+#
+
+def md5s(data):
+ if not _md5:
+ raise ValueError('MD5 not available. Possibly running in FIPS mode')
+ return secure_hash_s(data, _md5)
+
+
+def md5(filename):
+ if not _md5:
+ raise ValueError('MD5 not available. Possibly running in FIPS mode')
+ return secure_hash(filename, _md5)
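
Editorial sketch of the hashing helpers:

from ansible.utils.hashing import checksum_s, secure_hash, md5s

checksum_s('hello')    # sha1 of the UTF-8 bytes:
# 'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d'
secure_hash('/no/such/file')   # None: missing paths and directories return None
md5s('hello')   # md5 hex digest, or ValueError on FIPS systems lacking hashlib.md5
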
diff --git a/lib/ansible/utils/helpers.py b/lib/ansible/utils/helpers.py
new file mode 100644
index 0000000..658ad99
--- /dev/null
+++ b/lib/ansible/utils/helpers.py
@@ -0,0 +1,51 @@
+# (c) 2016, Ansible by Red Hat <info@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.module_utils.six import string_types
+
+
+def pct_to_int(value, num_items, min_value=1):
+ '''
+ Converts a percentage given as "x%" into the corresponding number of items
+ (never returning less than min_value), otherwise converts the given value to an integer.
+ '''
+ if isinstance(value, string_types) and value.endswith('%'):
+ value_pct = int(value.replace("%", ""))
+ return int((value_pct / 100.0) * num_items) or min_value
+ else:
+ return int(value)
+
+
+def object_to_dict(obj, exclude=None):
+ """
+ Converts an object into a dict, making its properties into keys; certain keys can be excluded
+ """
+ if exclude is None or not isinstance(exclude, list):
+ exclude = []
+ return dict((key, getattr(obj, key)) for key in dir(obj) if not (key.startswith('_') or key in exclude))
+
+
+def deduplicate_list(original_list):
+ """
+ Creates a deduplicated list with the order in which each item is first found.
+ """
+ seen = set()
+ return [x for x in original_list if x not in seen and not seen.add(x)]
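
Worked examples for the helpers above (editorial, not part of the diff):

from ansible.utils.helpers import pct_to_int, deduplicate_list

pct_to_int('30%', 10)   # 3: int(0.30 * 10)
pct_to_int('1%', 10)    # 1: int(0.01 * 10) == 0, so min_value kicks in
pct_to_int('5', 10)     # 5: non-percentage values are simply cast with int()
deduplicate_list(['a', 'b', 'a', 'c', 'b'])   # ['a', 'b', 'c'], first-seen order
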
diff --git a/lib/ansible/utils/jsonrpc.py b/lib/ansible/utils/jsonrpc.py
new file mode 100644
index 0000000..8d5b0f6
--- /dev/null
+++ b/lib/ansible/utils/jsonrpc.py
@@ -0,0 +1,113 @@
+# (c) 2017, Peter Sprygada <psprygad@redhat.com>
+# (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import json
+import pickle
+import traceback
+
+from ansible.module_utils._text import to_text
+from ansible.module_utils.connection import ConnectionError
+from ansible.module_utils.six import binary_type, text_type
+from ansible.utils.display import Display
+
+display = Display()
+
+
+class JsonRpcServer(object):
+
+ _objects = set() # type: set[object]
+
+ def handle_request(self, request):
+ request = json.loads(to_text(request, errors='surrogate_then_replace'))
+
+ method = request.get('method')
+
+ if method.startswith('rpc.') or method.startswith('_'):
+ error = self.invalid_request()
+ return json.dumps(error)
+
+ args, kwargs = request.get('params')
+ setattr(self, '_identifier', request.get('id'))
+
+ rpc_method = None
+ for obj in self._objects:
+ rpc_method = getattr(obj, method, None)
+ if rpc_method:
+ break
+
+ if not rpc_method:
+ error = self.method_not_found()
+ response = json.dumps(error)
+ else:
+ try:
+ result = rpc_method(*args, **kwargs)
+ except ConnectionError as exc:
+ display.vvv(traceback.format_exc())
+ try:
+ error = self.error(code=exc.code, message=to_text(exc))
+ except AttributeError:
+ error = self.internal_error(data=to_text(exc))
+ response = json.dumps(error)
+ except Exception as exc:
+ display.vvv(traceback.format_exc())
+ error = self.internal_error(data=to_text(exc, errors='surrogate_then_replace'))
+ response = json.dumps(error)
+ else:
+ if isinstance(result, dict) and 'jsonrpc' in result:
+ response = result
+ else:
+ response = self.response(result)
+
+ try:
+ response = json.dumps(response)
+ except Exception as exc:
+ display.vvv(traceback.format_exc())
+ error = self.internal_error(data=to_text(exc, errors='surrogate_then_replace'))
+ response = json.dumps(error)
+
+ delattr(self, '_identifier')
+
+ return response
+
+ def register(self, obj):
+ self._objects.add(obj)
+
+ def header(self):
+ return {'jsonrpc': '2.0', 'id': self._identifier}
+
+ def response(self, result=None):
+ response = self.header()
+ if isinstance(result, binary_type):
+ result = to_text(result)
+ if not isinstance(result, text_type):
+ response["result_type"] = "pickle"
+ result = to_text(pickle.dumps(result, protocol=0))
+ response['result'] = result
+ return response
+
+ def error(self, code, message, data=None):
+ response = self.header()
+ error = {'code': code, 'message': message}
+ if data:
+ error['data'] = data
+ response['error'] = error
+ return response
+
+ # json-rpc standard errors (-32768 .. -32000)
+ def parse_error(self, data=None):
+ return self.error(-32700, 'Parse error', data)
+
+ def method_not_found(self, data=None):
+ return self.error(-32601, 'Method not found', data)
+
+ def invalid_request(self, data=None):
+ return self.error(-32600, 'Invalid request', data)
+
+ def invalid_params(self, data=None):
+ return self.error(-32602, 'Invalid params', data)
+
+ def internal_error(self, data=None):
+ return self.error(-32603, 'Internal error', data)
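
A minimal round trip (editorial; note params is expected to be a two-element [args, kwargs] pair):

import json

from ansible.utils.jsonrpc import JsonRpcServer

class Api:
    def ping(self):
        return 'pong'

server = JsonRpcServer()
server.register(Api())
request = json.dumps({'jsonrpc': '2.0', 'method': 'ping', 'params': [[], {}], 'id': 1})
server.handle_request(request)
# -> '{"jsonrpc": "2.0", "id": 1, "result": "pong"}'
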
diff --git a/lib/ansible/utils/listify.py b/lib/ansible/utils/listify.py
new file mode 100644
index 0000000..0e6a872
--- /dev/null
+++ b/lib/ansible/utils/listify.py
@@ -0,0 +1,46 @@
+# (c) 2014 Michael DeHaan, <michael@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from collections.abc import Iterable
+
+from ansible.module_utils.six import string_types
+from ansible.utils.display import Display
+
+display = Display()
+
+__all__ = ['listify_lookup_plugin_terms']
+
+
+def listify_lookup_plugin_terms(terms, templar, loader=None, fail_on_undefined=True, convert_bare=False):
+
+ if loader is not None:
+ display.deprecated('"listify_lookup_plugin_terms" does not use "dataloader" anymore, the ability to pass it in will be removed in future versions.',
+ version='2.18')
+
+ if isinstance(terms, string_types):
+ terms = templar.template(terms.strip(), convert_bare=convert_bare, fail_on_undefined=fail_on_undefined)
+ else:
+ terms = templar.template(terms, fail_on_undefined=fail_on_undefined)
+
+ if isinstance(terms, string_types) or not isinstance(terms, Iterable):
+ terms = [terms]
+
+ return terms
diff --git a/lib/ansible/utils/lock.py b/lib/ansible/utils/lock.py
new file mode 100644
index 0000000..34387dc
--- /dev/null
+++ b/lib/ansible/utils/lock.py
@@ -0,0 +1,43 @@
+# Copyright (c) 2020 Matt Martz <matt@sivel.net>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from functools import wraps
+
+
+def lock_decorator(attr='missing_lock_attr', lock=None):
+ '''This decorator is a generic implementation that allows you
+ to either use a pre-defined instance attribute as the location
+ of the lock, or to explicitly pass a lock object.
+
+ This code was implemented with ``threading.Lock`` in mind, but
+ may work with other locks, assuming that they function as
+ context managers.
+
+ When using ``attr``, the assumption is that the first argument to
+ the wrapped method is ``self`` or ``cls``.
+
+ Examples:
+
+ @lock_decorator(attr='_callback_lock')
+ def send_callback(...):
+
+ @lock_decorator(lock=threading.Lock())
+ def some_method(...):
+ '''
+ def outer(func):
+ @wraps(func)
+ def inner(*args, **kwargs):
+ # Python2 doesn't have ``nonlocal``
+ # assign the actual lock to ``_lock``
+ if lock is None:
+ _lock = getattr(args[0], attr)
+ else:
+ _lock = lock
+ with _lock:
+ return func(*args, **kwargs)
+ return inner
+ return outer
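+
+
+# A minimal usage sketch (the class and its lock attribute are hypothetical,
+# not part of this module):
+#
+#   import threading
+#
+#   class CallbackSender:
+#       def __init__(self):
+#           self._callback_lock = threading.Lock()
+#
+#       @lock_decorator(attr='_callback_lock')
+#       def send_callback(self, event):
+#           ...  # only one thread at a time executes this body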
diff --git a/lib/ansible/utils/multiprocessing.py b/lib/ansible/utils/multiprocessing.py
new file mode 100644
index 0000000..2912f71
--- /dev/null
+++ b/lib/ansible/utils/multiprocessing.py
@@ -0,0 +1,17 @@
+# Copyright (c) 2019 Matt Martz <matt@sivel.net>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import multiprocessing
+
+# Explicit multiprocessing context using the fork start method
+# This exists as a compat layer now that Python3.8 has changed the default
+# start method for macOS to ``spawn``, which is incompatible with our
+# code base currently
+#
+# This exists in utils to allow it to be easily imported into various places
+# without causing circular import or dependency problems
+context = multiprocessing.get_context('fork')
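+
+# Callers use this context object exactly like the top-level ``multiprocessing``
+# module, e.g. (illustrative; ``some_callable`` is a placeholder):
+#
+#   from ansible.utils.multiprocessing import context as multiprocessing_context
+#   proc = multiprocessing_context.Process(target=some_callable)
+#   proc.start()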
diff --git a/lib/ansible/utils/native_jinja.py b/lib/ansible/utils/native_jinja.py
new file mode 100644
index 0000000..53ef140
--- /dev/null
+++ b/lib/ansible/utils/native_jinja.py
@@ -0,0 +1,13 @@
+# Copyright: (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+from ansible.module_utils.six import text_type
+
+
+class NativeJinjaText(text_type):
+ pass
diff --git a/lib/ansible/utils/path.py b/lib/ansible/utils/path.py
new file mode 100644
index 0000000..f876add
--- /dev/null
+++ b/lib/ansible/utils/path.py
@@ -0,0 +1,161 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import shutil
+
+from errno import EEXIST
+from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_bytes, to_native, to_text
+
+
+__all__ = ['unfrackpath', 'makedirs_safe']
+
+
+def unfrackpath(path, follow=True, basedir=None):
+ '''
+    Returns a path that is free of symlinks (if follow=True), environment variables, relative path traversals and tildes (~)
+
+    :arg path: A byte or text string representing a path to be canonicalized
+    :arg follow: A boolean to indicate whether symlinks should be resolved or not
+    :raises UnicodeDecodeError: If the canonicalized version of the path
+    contains non-utf8 byte sequences.
+    :rtype: A text string (unicode on python2, str on python3).
+ :returns: An absolute path with symlinks, environment variables, and tilde
+ expanded. Note that this does not check whether a path exists.
+
+ example::
+ '$HOME/../../var/mail' becomes '/var/spool/mail'
+ '''
+
+ b_basedir = to_bytes(basedir, errors='surrogate_or_strict', nonstring='passthru')
+
+ if b_basedir is None:
+ b_basedir = to_bytes(os.getcwd(), errors='surrogate_or_strict')
+ elif os.path.isfile(b_basedir):
+ b_basedir = os.path.dirname(b_basedir)
+
+ b_final_path = os.path.expanduser(os.path.expandvars(to_bytes(path, errors='surrogate_or_strict')))
+
+ if not os.path.isabs(b_final_path):
+ b_final_path = os.path.join(b_basedir, b_final_path)
+
+ if follow:
+ b_final_path = os.path.realpath(b_final_path)
+
+ return to_text(os.path.normpath(b_final_path), errors='surrogate_or_strict')
+
+
+def makedirs_safe(path, mode=None):
+ '''
+ A *potentially insecure* way to ensure the existence of a directory chain. The "safe" in this function's name
+ refers only to its ability to ignore `EEXIST` in the case of multiple callers operating on the same part of
+ the directory chain. This function is not safe to use under world-writable locations when the first level of the
+ path to be created contains a predictable component. Always create a randomly-named element first if there is any
+ chance the parent directory might be world-writable (eg, /tmp) to prevent symlink hijacking and potential
+ disclosure or modification of sensitive file contents.
+
+ :arg path: A byte or text string representing a directory chain to be created
+ :kwarg mode: If given, the mode to set the directory to
+ :raises AnsibleError: If the directory cannot be created and does not already exist.
+ :raises UnicodeDecodeError: if the path is not decodable in the utf-8 encoding.
+ '''
+
+ rpath = unfrackpath(path)
+ b_rpath = to_bytes(rpath)
+ if not os.path.exists(b_rpath):
+ try:
+ if mode:
+ os.makedirs(b_rpath, mode)
+ else:
+ os.makedirs(b_rpath)
+ except OSError as e:
+ if e.errno != EEXIST:
+ raise AnsibleError("Unable to create local directories(%s): %s" % (to_native(rpath), to_native(e)))
+
+
+def basedir(source):
+ """ returns directory for inventory or playbook """
+ source = to_bytes(source, errors='surrogate_or_strict')
+ dname = None
+ if os.path.isdir(source):
+ dname = source
+ elif source in [None, '', '.']:
+ dname = os.getcwd()
+ elif os.path.isfile(source):
+ dname = os.path.dirname(source)
+
+ if dname:
+ # don't follow symlinks for basedir, enables source re-use
+ dname = os.path.abspath(dname)
+
+ return to_text(dname, errors='surrogate_or_strict')
+
+
+def cleanup_tmp_file(path, warn=False):
+ """
+    Removes a temporary file or directory. Optionally displays a warning if
+    unable to remove the file or directory.
+
+ :arg path: Path to file or directory to be removed
+ :kwarg warn: Whether or not to display a warning when the file or directory
+ cannot be removed
+ """
+ try:
+ if os.path.exists(path):
+ try:
+ if os.path.isdir(path):
+ shutil.rmtree(path)
+ elif os.path.isfile(path):
+ os.unlink(path)
+ except Exception as e:
+ if warn:
+ # Importing here to avoid circular import
+ from ansible.utils.display import Display
+ display = Display()
+ display.display(u'Unable to remove temporary file {0}'.format(to_text(e)))
+ except Exception:
+ pass
+
+
+def is_subpath(child, parent, real=False):
+ """
+ Compares paths to check if one is contained in the other
+    :arg child: Path to test
+    :arg parent: Path to test against
+ """
+ test = False
+
+ abs_child = unfrackpath(child, follow=False)
+ abs_parent = unfrackpath(parent, follow=False)
+
+ if real:
+ abs_child = os.path.realpath(abs_child)
+ abs_parent = os.path.realpath(abs_parent)
+
+ c = abs_child.split(os.path.sep)
+ p = abs_parent.split(os.path.sep)
+
+ try:
+ test = c[:len(p)] == p
+ except IndexError:
+ # child is shorter than parent so cannot be subpath
+ pass
+
+ return test
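+
+# Illustrative expectations (the comparison is component-wise, not a raw
+# string prefix match):
+#
+#   is_subpath('/etc/ansible/roles', '/etc/ansible')  # -> True
+#   is_subpath('/etc/ansible', '/etc/ansible/roles')  # -> False
+#   is_subpath('/etc/ansible2', '/etc/ansible')       # -> False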
diff --git a/lib/ansible/utils/plugin_docs.py b/lib/ansible/utils/plugin_docs.py
new file mode 100644
index 0000000..3af2678
--- /dev/null
+++ b/lib/ansible/utils/plugin_docs.py
@@ -0,0 +1,351 @@
+# Copyright: (c) 2012, Jan-Piet Mens <jpmens () gmail.com>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from collections.abc import MutableMapping, MutableSet, MutableSequence
+from pathlib import Path
+
+from ansible import constants as C
+from ansible.release import __version__ as ansible_version
+from ansible.errors import AnsibleError, AnsibleParserError, AnsiblePluginNotFound
+from ansible.module_utils.six import string_types
+from ansible.module_utils._text import to_native
+from ansible.parsing.plugin_docs import read_docstring
+from ansible.parsing.yaml.loader import AnsibleLoader
+from ansible.utils.display import Display
+
+display = Display()
+
+
+def merge_fragment(target, source):
+
+ for key, value in source.items():
+ if key in target:
+ # assumes both structures have same type
+ if isinstance(target[key], MutableMapping):
+ value.update(target[key])
+ elif isinstance(target[key], MutableSet):
+ value.add(target[key])
+ elif isinstance(target[key], MutableSequence):
+ value = sorted(frozenset(value + target[key]))
+ else:
+ raise Exception("Attempt to extend a documentation fragment, invalid type for %s" % key)
+ target[key] = value
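+
+
+# A rough sketch of the merge semantics (illustrative values):
+#
+#   target = {'notes': ['a'], 'requirements': ['x']}
+#   source = {'notes': ['b'], 'author': ['someone']}
+#   merge_fragment(target, source)
+#   # target == {'notes': ['a', 'b'], 'requirements': ['x'], 'author': ['someone']}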
+
+
+def _process_versions_and_dates(fragment, is_module, return_docs, callback):
+ def process_deprecation(deprecation, top_level=False):
+ collection_name = 'removed_from_collection' if top_level else 'collection_name'
+ if not isinstance(deprecation, MutableMapping):
+ return
+ if (is_module or top_level) and 'removed_in' in deprecation: # used in module deprecations
+ callback(deprecation, 'removed_in', collection_name)
+ if 'removed_at_date' in deprecation:
+ callback(deprecation, 'removed_at_date', collection_name)
+ if not (is_module or top_level) and 'version' in deprecation: # used in plugin option deprecations
+ callback(deprecation, 'version', collection_name)
+
+ def process_option_specifiers(specifiers):
+ for specifier in specifiers:
+ if not isinstance(specifier, MutableMapping):
+ continue
+ if 'version_added' in specifier:
+ callback(specifier, 'version_added', 'version_added_collection')
+ if isinstance(specifier.get('deprecated'), MutableMapping):
+ process_deprecation(specifier['deprecated'])
+
+ def process_options(options):
+ for option in options.values():
+ if not isinstance(option, MutableMapping):
+ continue
+ if 'version_added' in option:
+ callback(option, 'version_added', 'version_added_collection')
+ if not is_module:
+ if isinstance(option.get('env'), list):
+ process_option_specifiers(option['env'])
+ if isinstance(option.get('ini'), list):
+ process_option_specifiers(option['ini'])
+ if isinstance(option.get('vars'), list):
+ process_option_specifiers(option['vars'])
+ if isinstance(option.get('deprecated'), MutableMapping):
+ process_deprecation(option['deprecated'])
+ if isinstance(option.get('suboptions'), MutableMapping):
+ process_options(option['suboptions'])
+
+ def process_return_values(return_values):
+ for return_value in return_values.values():
+ if not isinstance(return_value, MutableMapping):
+ continue
+ if 'version_added' in return_value:
+ callback(return_value, 'version_added', 'version_added_collection')
+ if isinstance(return_value.get('contains'), MutableMapping):
+ process_return_values(return_value['contains'])
+
+ def process_attributes(attributes):
+ for attribute in attributes.values():
+ if not isinstance(attribute, MutableMapping):
+ continue
+ if 'version_added' in attribute:
+ callback(attribute, 'version_added', 'version_added_collection')
+
+ if not fragment:
+ return
+
+ if return_docs:
+ process_return_values(fragment)
+ return
+
+ if 'version_added' in fragment:
+ callback(fragment, 'version_added', 'version_added_collection')
+ if isinstance(fragment.get('deprecated'), MutableMapping):
+ process_deprecation(fragment['deprecated'], top_level=True)
+ if isinstance(fragment.get('options'), MutableMapping):
+ process_options(fragment['options'])
+ if isinstance(fragment.get('attributes'), MutableMapping):
+ process_attributes(fragment['attributes'])
+
+
+def add_collection_to_versions_and_dates(fragment, collection_name, is_module, return_docs=False):
+ def add(options, option, collection_name_field):
+ if collection_name_field not in options:
+ options[collection_name_field] = collection_name
+
+ _process_versions_and_dates(fragment, is_module, return_docs, add)
+
+
+def remove_current_collection_from_versions_and_dates(fragment, collection_name, is_module, return_docs=False):
+ def remove(options, option, collection_name_field):
+ if options.get(collection_name_field) == collection_name:
+ del options[collection_name_field]
+
+ _process_versions_and_dates(fragment, is_module, return_docs, remove)
+
+
+def add_fragments(doc, filename, fragment_loader, is_module=False):
+
+ fragments = doc.pop('extends_documentation_fragment', [])
+
+ if isinstance(fragments, string_types):
+ fragments = [fragments]
+
+ unknown_fragments = []
+
+ # doc_fragments are allowed to specify a fragment var other than DOCUMENTATION
+ # with a . separator; this is complicated by collections-hosted doc_fragments that
+    # use the same separator. Assume it's collection-hosted first and try to load
+    # as-specified; on failure, assume the right-most component is a var, split it off,
+    # and retry the load.
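+    #
+    # e.g. (hypothetical slugs): 'ns.coll.myfrag' is first tried as a fragment
+    # named 'ns.coll.myfrag' using its DOCUMENTATION var; if that lookup fails,
+    # it is retried as fragment 'ns.coll' with var 'MYFRAG'.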
+ for fragment_slug in fragments:
+ fragment_name = fragment_slug
+ fragment_var = 'DOCUMENTATION'
+
+ fragment_class = fragment_loader.get(fragment_name)
+ if fragment_class is None and '.' in fragment_slug:
+ splitname = fragment_slug.rsplit('.', 1)
+ fragment_name = splitname[0]
+ fragment_var = splitname[1].upper()
+ fragment_class = fragment_loader.get(fragment_name)
+
+ if fragment_class is None:
+ unknown_fragments.append(fragment_slug)
+ continue
+
+ fragment_yaml = getattr(fragment_class, fragment_var, None)
+ if fragment_yaml is None:
+ if fragment_var != 'DOCUMENTATION':
+ # if it's asking for something specific that's missing, that's an error
+ unknown_fragments.append(fragment_slug)
+ continue
+ else:
+ fragment_yaml = '{}' # TODO: this is still an error later since we require 'options' below...
+
+ fragment = AnsibleLoader(fragment_yaml, file_name=filename).get_single_data()
+
+ real_fragment_name = getattr(fragment_class, 'ansible_name')
+ real_collection_name = '.'.join(real_fragment_name.split('.')[0:2]) if '.' in real_fragment_name else ''
+ add_collection_to_versions_and_dates(fragment, real_collection_name, is_module=is_module)
+
+ if 'notes' in fragment:
+ notes = fragment.pop('notes')
+ if notes:
+ if 'notes' not in doc:
+ doc['notes'] = []
+ doc['notes'].extend(notes)
+
+ if 'seealso' in fragment:
+ seealso = fragment.pop('seealso')
+ if seealso:
+ if 'seealso' not in doc:
+ doc['seealso'] = []
+ doc['seealso'].extend(seealso)
+
+ if 'options' not in fragment and 'attributes' not in fragment:
+ raise Exception("missing options or attributes in fragment (%s), possibly misformatted?: %s" % (fragment_name, filename))
+
+ # ensure options themselves are directly merged
+ for doc_key in ['options', 'attributes']:
+ if doc_key in fragment:
+ if doc_key in doc:
+ try:
+ merge_fragment(doc[doc_key], fragment.pop(doc_key))
+ except Exception as e:
+ raise AnsibleError("%s %s (%s) of unknown type: %s" % (to_native(e), doc_key, fragment_name, filename))
+ else:
+ doc[doc_key] = fragment.pop(doc_key)
+
+ # merge rest of the sections
+ try:
+ merge_fragment(doc, fragment)
+ except Exception as e:
+ raise AnsibleError("%s (%s) of unknown type: %s" % (to_native(e), fragment_name, filename))
+
+ if unknown_fragments:
+ raise AnsibleError('unknown doc_fragment(s) in file {0}: {1}'.format(filename, to_native(', '.join(unknown_fragments))))
+
+
+def get_docstring(filename, fragment_loader, verbose=False, ignore_errors=False, collection_name=None, is_module=None, plugin_type=None):
+ """
+ DOCUMENTATION can be extended using documentation fragments loaded by the PluginLoader from the doc_fragments plugins.
+ """
+
+ if is_module is None:
+ if plugin_type is None:
+ is_module = False
+ else:
+ is_module = (plugin_type == 'module')
+ else:
+ # TODO deprecate is_module argument, now that we have 'type'
+ pass
+
+ data = read_docstring(filename, verbose=verbose, ignore_errors=ignore_errors)
+
+ if data.get('doc', False):
+ # add collection name to versions and dates
+ if collection_name is not None:
+ add_collection_to_versions_and_dates(data['doc'], collection_name, is_module=is_module)
+
+ # add fragments to documentation
+ add_fragments(data['doc'], filename, fragment_loader=fragment_loader, is_module=is_module)
+
+ if data.get('returndocs', False):
+ # add collection name to versions and dates
+ if collection_name is not None:
+ add_collection_to_versions_and_dates(data['returndocs'], collection_name, is_module=is_module, return_docs=True)
+
+ return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']
+
+
+def get_versioned_doclink(path):
+ """
+ returns a versioned documentation link for the current Ansible major.minor version; used to generate
+ in-product warning/error links to the configured DOCSITE_ROOT_URL
+ (eg, https://docs.ansible.com/ansible/2.8/somepath/doc.html)
+
+ :param path: relative path to a document under docs/docsite/rst;
+ :return: absolute URL to the specified doc for the current version of Ansible
+ """
+ path = to_native(path)
+ try:
+ base_url = C.config.get_config_value('DOCSITE_ROOT_URL')
+ if not base_url.endswith('/'):
+ base_url += '/'
+ if path.startswith('/'):
+ path = path[1:]
+ split_ver = ansible_version.split('.')
+ if len(split_ver) < 3:
+ raise RuntimeError('invalid version ({0})'.format(ansible_version))
+
+ doc_version = '{0}.{1}'.format(split_ver[0], split_ver[1])
+
+        # check to see if it's an X.Y.0 non-rc prerelease or dev release; if so, assume devel (since the X.Y doctree
+ # isn't published until beta-ish)
+ if split_ver[2].startswith('0'):
+ # exclude rc; we should have the X.Y doctree live by rc1
+ if any((pre in split_ver[2]) for pre in ['a', 'b']) or len(split_ver) > 3 and 'dev' in split_ver[3]:
+ doc_version = 'devel'
+
+ return '{0}{1}/{2}'.format(base_url, doc_version, path)
+ except Exception as ex:
+ return '(unable to create versioned doc link for path {0}: {1})'.format(path, to_native(ex))
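+
+# e.g. (illustrative, assuming ansible_version is '2.14.3' and the default
+# DOCSITE_ROOT_URL):
+#
+#   get_versioned_doclink('porting_guides/porting_guide_2.14.html')
+#   # -> 'https://docs.ansible.com/ansible/2.14/porting_guides/porting_guide_2.14.html'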
+
+
+def _find_adjacent(path, plugin, extensions):
+
+ adjacent = Path(path)
+
+ plugin_base_name = plugin.split('.')[-1]
+ if adjacent.stem != plugin_base_name:
+ # this should only affect filters/tests
+ adjacent = adjacent.with_name(plugin_base_name)
+
+ paths = []
+ for ext in extensions:
+ candidate = adjacent.with_suffix(ext)
+ if candidate == adjacent:
+ # we're looking for an adjacent file, skip this since it's identical
+ continue
+ if candidate.exists():
+ paths.append(to_native(candidate))
+
+ return paths
+
+
+def find_plugin_docfile(plugin, plugin_type, loader):
+ ''' if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding 'sidecar' file for docs '''
+
+ context = loader.find_plugin_with_context(plugin, ignore_deprecated=False, check_aliases=True)
+ if (not context or not context.resolved) and plugin_type in ('filter', 'test'):
+        # should only happen for filters/tests
+ plugin_obj, context = loader.get_with_context(plugin)
+
+ if not context or not context.resolved:
+ raise AnsiblePluginNotFound('%s was not found' % (plugin), plugin_load_context=context)
+
+ docfile = Path(context.plugin_resolved_path)
+ if docfile.suffix not in C.DOC_EXTENSIONS:
+        # only look for an adjacent file if the plugin file itself cannot embed documentation
+ filenames = _find_adjacent(docfile, plugin, C.DOC_EXTENSIONS)
+ filename = filenames[0] if filenames else None
+ else:
+ filename = to_native(docfile)
+
+ if filename is None:
+ raise AnsibleError('%s cannot contain DOCUMENTATION nor does it have a companion documentation file' % (plugin))
+
+ return filename, context.plugin_resolved_collection
+
+
+def get_plugin_docs(plugin, plugin_type, loader, fragment_loader, verbose):
+
+ docs = []
+
+    # find the plugin doc file; if it doesn't exist this raises an exception,
+    # which we let propagate to short circuit when the plugin is 'not found'
+ filename, collection_name = find_plugin_docfile(plugin, plugin_type, loader)
+
+ try:
+ docs = get_docstring(filename, fragment_loader, verbose=verbose, collection_name=collection_name, plugin_type=plugin_type)
+ except Exception as e:
+ raise AnsibleParserError('%s did not contain a DOCUMENTATION attribute (%s)' % (plugin, filename), orig_exc=e)
+
+ # no good? try adjacent
+ if not docs[0]:
+ for newfile in _find_adjacent(filename, plugin, C.DOC_EXTENSIONS):
+ try:
+ docs = get_docstring(newfile, fragment_loader, verbose=verbose, collection_name=collection_name, plugin_type=plugin_type)
+ filename = newfile
+ if docs[0] is not None:
+ break
+ except Exception as e:
+                raise AnsibleParserError('Adjacent file %s did not contain a DOCUMENTATION attribute (%s)' % (newfile, plugin), orig_exc=e)
+
+ # add extra data to docs[0] (aka 'DOCUMENTATION')
+ if docs[0] is None:
+ raise AnsibleParserError('No documentation available for %s (%s)' % (plugin, filename))
+ else:
+ docs[0]['filename'] = filename
+ docs[0]['collection'] = collection_name
+
+ return docs
diff --git a/lib/ansible/utils/py3compat.py b/lib/ansible/utils/py3compat.py
new file mode 100644
index 0000000..88d9fdf
--- /dev/null
+++ b/lib/ansible/utils/py3compat.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+#
+# (c) 2018, Toshio Kuratomi <a.badger@gmail.com>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+# Note that the original author of this, Toshio Kuratomi, is trying to submit this to six. If
+# successful, the code in six will be available under six's more liberal license:
+# https://mail.python.org/pipermail/python-porting/2018-July/000539.html
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import sys
+
+from collections.abc import MutableMapping
+
+from ansible.module_utils.six import PY3
+from ansible.module_utils._text import to_bytes, to_text
+
+__all__ = ('environ',)
+
+
+class _TextEnviron(MutableMapping):
+ """
+ Utility class to return text strings from the environment instead of byte strings
+
+ Mimics the behaviour of os.environ on Python3
+ """
+ def __init__(self, env=None, encoding=None):
+ if env is None:
+ env = os.environ
+ self._raw_environ = env
+ self._value_cache = {}
+ if encoding is None:
+ # Since we're trying to mimic Python3's os.environ, use sys.getfilesystemencoding()
+ # instead of utf-8
+ self.encoding = sys.getfilesystemencoding()
+ else:
+ self.encoding = encoding
+
+ def __delitem__(self, key):
+ del self._raw_environ[key]
+
+ def __getitem__(self, key):
+ value = self._raw_environ[key]
+ if PY3:
+ return value
+ # Cache keys off of the undecoded values to handle any environment variables which change
+ # during a run
+ if value not in self._value_cache:
+ self._value_cache[value] = to_text(value, encoding=self.encoding,
+ nonstring='passthru', errors='surrogate_or_strict')
+ return self._value_cache[value]
+
+ def __setitem__(self, key, value):
+ self._raw_environ[key] = to_bytes(value, encoding=self.encoding, nonstring='strict',
+ errors='surrogate_or_strict')
+
+ def __iter__(self):
+ return self._raw_environ.__iter__()
+
+ def __len__(self):
+ return len(self._raw_environ)
+
+
+environ = _TextEnviron(encoding='utf-8')
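+
+# A drop-in replacement for os.environ, e.g. (illustrative):
+#
+#   from ansible.utils.py3compat import environ
+#   path = environ.get('PATH', '')  # text on both py2 and py3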
diff --git a/lib/ansible/utils/sentinel.py b/lib/ansible/utils/sentinel.py
new file mode 100644
index 0000000..ca4f827
--- /dev/null
+++ b/lib/ansible/utils/sentinel.py
@@ -0,0 +1,68 @@
+# Copyright (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+class Sentinel:
+ """
+    Object which can be used to mark an entry as being special
+
+ A sentinel value demarcates a value or marks an entry as having a special meaning. In C, the
+ Null byte is used as a sentinel for the end of a string. In Python, None is often used as
+ a Sentinel in optional parameters to mean that the parameter was not set by the user.
+
+    You should use None as a Sentinel value in any Python code where None is not a valid entry. If
+ None is a valid entry, though, then you need to create a different value, which is the purpose
+ of this class.
+
+ Example of using Sentinel as a default parameter value::
+
+ def confirm_big_red_button(tristate=Sentinel):
+ if tristate is Sentinel:
+ print('You must explicitly press the big red button to blow up the base')
+ elif tristate is True:
+ print('Countdown to destruction activated')
+ elif tristate is False:
+ print('Countdown stopped')
+ elif tristate is None:
+ print('Waiting for more input')
+
+ Example of using Sentinel to tell whether a dict which has a default value has been changed::
+
+ values = {'one': Sentinel, 'two': Sentinel}
+ defaults = {'one': 1, 'two': 2}
+
+ # [.. Other code which does things including setting a new value for 'one' ..]
+ values['one'] = None
+ # [..]
+
+ print('You made changes to:')
+ for key, value in values.items():
+ if value is Sentinel:
+ continue
+            print('%s: %s' % (key, value))
+ """
+
+ def __new__(cls):
+ """
+ Return the cls itself. This makes both equality and identity True for comparing the class
+ to an instance of the class, preventing common usage errors.
+
+ Preferred usage::
+
+ a = Sentinel
+ if a is Sentinel:
+ print('Sentinel value')
+
+ However, these are True as well, eliminating common usage errors::
+
+ if Sentinel is Sentinel():
+ print('Sentinel value')
+
+ if Sentinel == Sentinel():
+ print('Sentinel value')
+ """
+ return cls
diff --git a/lib/ansible/utils/shlex.py b/lib/ansible/utils/shlex.py
new file mode 100644
index 0000000..5e82021
--- /dev/null
+++ b/lib/ansible/utils/shlex.py
@@ -0,0 +1,34 @@
+# (c) 2015, Marius Gedminas <marius@gedmin.as>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import shlex
+from ansible.module_utils.six import PY3
+from ansible.module_utils._text import to_bytes, to_text
+
+
+if PY3:
+ # shlex.split() wants Unicode (i.e. ``str``) input on Python 3
+ shlex_split = shlex.split
+else:
+ # shlex.split() wants bytes (i.e. ``str``) input on Python 2
+ def shlex_split(s, comments=False, posix=True):
+ return map(to_text, shlex.split(to_bytes(s), comments, posix))
+ shlex_split.__doc__ = shlex.split.__doc__
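+
+# Either branch yields text results, e.g. (illustrative):
+#
+#   list(shlex_split('a "b c"'))  # -> ['a', 'b c']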
diff --git a/lib/ansible/utils/singleton.py b/lib/ansible/utils/singleton.py
new file mode 100644
index 0000000..4299403
--- /dev/null
+++ b/lib/ansible/utils/singleton.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from threading import RLock
+
+
+class Singleton(type):
+ """Metaclass for classes that wish to implement Singleton
+ functionality. If an instance of the class exists, it's returned,
+ otherwise a single instance is instantiated and returned.
+ """
+ def __init__(cls, name, bases, dct):
+ super(Singleton, cls).__init__(name, bases, dct)
+ cls.__instance = None
+ cls.__rlock = RLock()
+
+ def __call__(cls, *args, **kw):
+ if cls.__instance is not None:
+ return cls.__instance
+
+ with cls.__rlock:
+ if cls.__instance is None:
+ cls.__instance = super(Singleton, cls).__call__(*args, **kw)
+
+ return cls.__instance
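+
+
+# A minimal usage sketch (hypothetical class; py2/py3-compatible code would use
+# a helper such as six.add_metaclass instead of the py3-only syntax below):
+#
+#   class Config(metaclass=Singleton):
+#       pass
+#
+#   assert Config() is Config()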
diff --git a/lib/ansible/utils/ssh_functions.py b/lib/ansible/utils/ssh_functions.py
new file mode 100644
index 0000000..a728889
--- /dev/null
+++ b/lib/ansible/utils/ssh_functions.py
@@ -0,0 +1,66 @@
+# (c) 2016, James Tanner
+# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import subprocess
+
+from ansible import constants as C
+from ansible.module_utils._text import to_bytes
+from ansible.module_utils.compat.paramiko import paramiko
+
+
+_HAS_CONTROLPERSIST = {} # type: dict[str, bool]
+
+
+def check_for_controlpersist(ssh_executable):
+ try:
+ # If we've already checked this executable
+ return _HAS_CONTROLPERSIST[ssh_executable]
+ except KeyError:
+ pass
+
+ b_ssh_exec = to_bytes(ssh_executable, errors='surrogate_or_strict')
+ has_cp = True
+ try:
+ cmd = subprocess.Popen([b_ssh_exec, '-o', 'ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (out, err) = cmd.communicate()
+ if b"Bad configuration option" in err or b"Usage:" in err:
+ has_cp = False
+ except OSError:
+ has_cp = False
+
+ _HAS_CONTROLPERSIST[ssh_executable] = has_cp
+ return has_cp
+
+
+# TODO: move to 'smart' connection plugin that subclasses to ssh/paramiko as needed.
+def set_default_transport():
+
+ # deal with 'smart' connection .. one time ..
+ if C.DEFAULT_TRANSPORT == 'smart':
+ # TODO: check if we can deprecate this as ssh w/o control persist should
+ # not be as common anymore.
+
+        # see if SSH can support ControlPersist; if not, use paramiko
+ if not check_for_controlpersist('ssh') and paramiko is not None:
+ C.DEFAULT_TRANSPORT = "paramiko"
+ else:
+ C.DEFAULT_TRANSPORT = "ssh"
diff --git a/lib/ansible/utils/unicode.py b/lib/ansible/utils/unicode.py
new file mode 100644
index 0000000..1218a6e
--- /dev/null
+++ b/lib/ansible/utils/unicode.py
@@ -0,0 +1,33 @@
+# (c) 2012-2014, Toshio Kuratomi <a.badger@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.module_utils._text import to_text
+
+
+__all__ = ('unicode_wrap',)
+
+
+def unicode_wrap(func, *args, **kwargs):
+ """If a function returns a string, force it to be a text string.
+
+ Use with partial to ensure that filter plugins will return text values.
+ """
+ return to_text(func(*args, **kwargs), nonstring='passthru')
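+
+# Typically used with functools.partial to coerce a filter function's return
+# value to text, e.g. (illustrative; ``do_upper`` is a stand-in callable):
+#
+#   from functools import partial
+#   upper_filter = partial(unicode_wrap, do_upper)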
diff --git a/lib/ansible/utils/unsafe_proxy.py b/lib/ansible/utils/unsafe_proxy.py
new file mode 100644
index 0000000..d78ebf6
--- /dev/null
+++ b/lib/ansible/utils/unsafe_proxy.py
@@ -0,0 +1,128 @@
+# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+# --------------------------------------------
+#
+# 1. This LICENSE AGREEMENT is between the Python Software Foundation
+# ("PSF"), and the Individual or Organization ("Licensee") accessing and
+# otherwise using this software ("Python") in source or binary form and
+# its associated documentation.
+#
+# 2. Subject to the terms and conditions of this License Agreement, PSF hereby
+# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+# analyze, test, perform and/or display publicly, prepare derivative works,
+# distribute, and otherwise use Python alone or in any derivative version,
+# provided, however, that PSF's License Agreement and PSF's notice of copyright,
+# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
+# retained in Python alone or in any derivative version prepared by Licensee.
+#
+# 3. In the event Licensee prepares a derivative work that is based on
+# or incorporates Python or any part thereof, and wants to make
+# the derivative work available to others as provided herein, then
+# Licensee hereby agrees to include in any such work a brief summary of
+# the changes made to Python.
+#
+# 4. PSF is making Python available to Licensee on an "AS IS"
+# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+# INFRINGE ANY THIRD PARTY RIGHTS.
+#
+# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+#
+# 6. This License Agreement will automatically terminate upon a material
+# breach of its terms and conditions.
+#
+# 7. Nothing in this License Agreement shall be deemed to create any
+# relationship of agency, partnership, or joint venture between PSF and
+# Licensee. This License Agreement does not grant permission to use PSF
+# trademarks or trade name in a trademark sense to endorse or promote
+# products or services of Licensee, or any third party.
+#
+# 8. By copying, installing or otherwise using Python, Licensee
+# agrees to be bound by the terms and conditions of this License
+# Agreement.
+#
+# Original Python Recipe for Proxy:
+# http://code.activestate.com/recipes/496741-object-proxying/
+# Author: Tomer Filiba
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from collections.abc import Mapping, Set
+
+from ansible.module_utils._text import to_bytes, to_text
+from ansible.module_utils.common.collections import is_sequence
+from ansible.module_utils.six import string_types, binary_type, text_type
+from ansible.utils.native_jinja import NativeJinjaText
+
+
+__all__ = ['AnsibleUnsafe', 'wrap_var']
+
+
+class AnsibleUnsafe(object):
+ __UNSAFE__ = True
+
+
+class AnsibleUnsafeBytes(binary_type, AnsibleUnsafe):
+ def decode(self, *args, **kwargs):
+ """Wrapper method to ensure type conversions maintain unsafe context"""
+ return AnsibleUnsafeText(super(AnsibleUnsafeBytes, self).decode(*args, **kwargs))
+
+
+class AnsibleUnsafeText(text_type, AnsibleUnsafe):
+ def encode(self, *args, **kwargs):
+ """Wrapper method to ensure type conversions maintain unsafe context"""
+ return AnsibleUnsafeBytes(super(AnsibleUnsafeText, self).encode(*args, **kwargs))
+
+
+class NativeJinjaUnsafeText(NativeJinjaText, AnsibleUnsafeText):
+ pass
+
+
+def _wrap_dict(v):
+ return dict((wrap_var(k), wrap_var(item)) for k, item in v.items())
+
+
+def _wrap_sequence(v):
+ """Wraps a sequence with unsafe, not meant for strings, primarily
+ ``tuple`` and ``list``
+ """
+ v_type = type(v)
+ return v_type(wrap_var(item) for item in v)
+
+
+def _wrap_set(v):
+ return set(wrap_var(item) for item in v)
+
+
+def wrap_var(v):
+ if v is None or isinstance(v, AnsibleUnsafe):
+ return v
+
+ if isinstance(v, Mapping):
+ v = _wrap_dict(v)
+ elif isinstance(v, Set):
+ v = _wrap_set(v)
+ elif is_sequence(v):
+ v = _wrap_sequence(v)
+ elif isinstance(v, NativeJinjaText):
+ v = NativeJinjaUnsafeText(v)
+ elif isinstance(v, binary_type):
+ v = AnsibleUnsafeBytes(v)
+ elif isinstance(v, text_type):
+ v = AnsibleUnsafeText(v)
+
+ return v
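+
+# A rough sketch of the recursion (illustrative):
+#
+#   wrap_var(u'secret')            # -> AnsibleUnsafeText(u'secret')
+#   wrap_var({'k': [u'a', b'b']})  # -> dict/list rebuilt with every string
+#                                  #    wrapped as AnsibleUnsafeText/Bytes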
+
+
+def to_unsafe_bytes(*args, **kwargs):
+ return wrap_var(to_bytes(*args, **kwargs))
+
+
+def to_unsafe_text(*args, **kwargs):
+ return wrap_var(to_text(*args, **kwargs))
diff --git a/lib/ansible/utils/vars.py b/lib/ansible/utils/vars.py
new file mode 100644
index 0000000..a3224c8
--- /dev/null
+++ b/lib/ansible/utils/vars.py
@@ -0,0 +1,293 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import keyword
+import random
+import uuid
+
+from collections.abc import MutableMapping, MutableSequence
+from json import dumps
+
+from ansible import constants as C
+from ansible import context
+from ansible.errors import AnsibleError, AnsibleOptionsError
+from ansible.module_utils.six import string_types, PY3
+from ansible.module_utils._text import to_native, to_text
+from ansible.parsing.splitter import parse_kv
+
+
+ADDITIONAL_PY2_KEYWORDS = frozenset(("True", "False", "None"))
+
+_MAXSIZE = 2 ** 32
+cur_id = 0
+node_mac = ("%012x" % uuid.getnode())[:12]
+random_int = ("%08x" % random.randint(0, _MAXSIZE))[:8]
+
+
+def get_unique_id():
+ global cur_id
+ cur_id += 1
+ return "-".join([
+ node_mac[0:8],
+ node_mac[8:12],
+ random_int[0:4],
+ random_int[4:8],
+ ("%012x" % cur_id)[:12],
+ ])
+
+
+def _validate_mutable_mappings(a, b):
+ """
+ Internal convenience function to ensure arguments are MutableMappings
+
+ This checks that all arguments are MutableMappings or raises an error
+
+ :raises AnsibleError: if one of the arguments is not a MutableMapping
+ """
+
+ # If this becomes generally needed, change the signature to operate on
+ # a variable number of arguments instead.
+
+ if not (isinstance(a, MutableMapping) and isinstance(b, MutableMapping)):
+ myvars = []
+ for x in [a, b]:
+ try:
+ myvars.append(dumps(x))
+ except Exception:
+ myvars.append(to_native(x))
+ raise AnsibleError("failed to combine variables, expected dicts but got a '{0}' and a '{1}': \n{2}\n{3}".format(
+ a.__class__.__name__, b.__class__.__name__, myvars[0], myvars[1])
+ )
+
+
+def combine_vars(a, b, merge=None):
+ """
+    Return a combined copy of the two dictionaries of variables, merged or replaced according to the configured hash behaviour
+ """
+
+ if merge or merge is None and C.DEFAULT_HASH_BEHAVIOUR == "merge":
+ return merge_hash(a, b)
+ else:
+ # HASH_BEHAVIOUR == 'replace'
+ _validate_mutable_mappings(a, b)
+ result = a | b
+ return result
+
+
+def merge_hash(x, y, recursive=True, list_merge='replace'):
+ """
+    Return a new dictionary resulting from merging y into x,
+    so that keys from y take precedence over keys from x.
+    (x and y aren't modified)
+ """
+ if list_merge not in ('replace', 'keep', 'append', 'prepend', 'append_rp', 'prepend_rp'):
+ raise AnsibleError("merge_hash: 'list_merge' argument can only be equal to 'replace', 'keep', 'append', 'prepend', 'append_rp' or 'prepend_rp'")
+
+ # verify x & y are dicts
+ _validate_mutable_mappings(x, y)
+
+ # to speed things up: if x is empty or equal to y, return y
+    # (this `if` can be removed without impact on the function
+    # except performance)
+ if x == {} or x == y:
+ return y.copy()
+
+ # in the following we will copy elements from y to x, but
+ # we don't want to modify x, so we create a copy of it
+ x = x.copy()
+
+ # to speed things up: use dict.update if possible
+    # (this `if` can be removed without impact on the function
+    # except performance)
+ if not recursive and list_merge == 'replace':
+ x.update(y)
+ return x
+
+ # insert each element of y in x, overriding the one in x
+ # (as y has higher priority)
+ # we copy elements from y to x instead of x to y because
+    # there is a high probability x will be the "default" dict the user
+    # wants to "patch" with y,
+    # and therefore x will have many more elements than y
+ for key, y_value in y.items():
+ # if `key` isn't in x
+ # update x and move on to the next element of y
+ if key not in x:
+ x[key] = y_value
+ continue
+ # from this point we know `key` is in x
+
+ x_value = x[key]
+
+ # if both x's element and y's element are dicts
+ # recursively "combine" them or override x's with y's element
+ # depending on the `recursive` argument
+ # and move on to the next element of y
+ if isinstance(x_value, MutableMapping) and isinstance(y_value, MutableMapping):
+ if recursive:
+ x[key] = merge_hash(x_value, y_value, recursive, list_merge)
+ else:
+ x[key] = y_value
+ continue
+
+ # if both x's element and y's element are lists
+ # "merge" them depending on the `list_merge` argument
+ # and move on to the next element of y
+ if isinstance(x_value, MutableSequence) and isinstance(y_value, MutableSequence):
+ if list_merge == 'replace':
+ # replace x value by y's one as it has higher priority
+ x[key] = y_value
+ elif list_merge == 'append':
+ x[key] = x_value + y_value
+ elif list_merge == 'prepend':
+ x[key] = y_value + x_value
+ elif list_merge == 'append_rp':
+ # append all elements from y_value (high prio) to x_value (low prio)
+ # and remove x_value elements that are also in y_value
+                # we don't deduplicate elements that were already duplicated within x_value or y_value
+                # (we assume that there is a reason if such duplicates were present)
+ # _rp stands for "remove present"
+ x[key] = [z for z in x_value if z not in y_value] + y_value
+ elif list_merge == 'prepend_rp':
+                # same as 'append_rp' but y_value elements are prepended
+ x[key] = y_value + [z for z in x_value if z not in y_value]
+ # else 'keep'
+            # keep x value even if y is of higher priority
+ # it's done by not changing x[key]
+ continue
+
+ # else just override x's element with y's one
+ x[key] = y_value
+
+ return x
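+
+# A rough sketch of the list_merge strategies (x is low priority, y high):
+#
+#   x = {'a': [1, 2]}
+#   y = {'a': [2, 3]}
+#   merge_hash(x, y)                          # -> {'a': [2, 3]}        ('replace')
+#   merge_hash(x, y, list_merge='keep')       # -> {'a': [1, 2]}
+#   merge_hash(x, y, list_merge='append')     # -> {'a': [1, 2, 2, 3]}
+#   merge_hash(x, y, list_merge='append_rp')  # -> {'a': [1, 2, 3]}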
+
+
+def load_extra_vars(loader):
+ extra_vars = {}
+ for extra_vars_opt in context.CLIARGS.get('extra_vars', tuple()):
+ data = None
+ extra_vars_opt = to_text(extra_vars_opt, errors='surrogate_or_strict')
+ if extra_vars_opt is None or not extra_vars_opt:
+ continue
+
+ if extra_vars_opt.startswith(u"@"):
+ # Argument is a YAML file (JSON is a subset of YAML)
+ data = loader.load_from_file(extra_vars_opt[1:])
+ elif extra_vars_opt[0] in [u'/', u'.']:
+ raise AnsibleOptionsError("Please prepend extra_vars filename '%s' with '@'" % extra_vars_opt)
+ elif extra_vars_opt[0] in [u'[', u'{']:
+ # Arguments as YAML
+ data = loader.load(extra_vars_opt)
+ else:
+ # Arguments as Key-value
+ data = parse_kv(extra_vars_opt)
+
+ if isinstance(data, MutableMapping):
+ extra_vars = combine_vars(extra_vars, data)
+ else:
+ raise AnsibleOptionsError("Invalid extra vars data supplied. '%s' could not be made into a dictionary" % extra_vars_opt)
+
+ return extra_vars
+
+
+def load_options_vars(version):
+
+ if version is None:
+ version = 'Unknown'
+ options_vars = {'ansible_version': version}
+ attrs = {'check': 'check_mode',
+ 'diff': 'diff_mode',
+ 'forks': 'forks',
+ 'inventory': 'inventory_sources',
+ 'skip_tags': 'skip_tags',
+ 'subset': 'limit',
+ 'tags': 'run_tags',
+ 'verbosity': 'verbosity'}
+
+ for attr, alias in attrs.items():
+ opt = context.CLIARGS.get(attr)
+ if opt is not None:
+ options_vars['ansible_%s' % alias] = opt
+
+ return options_vars
+
+
+def _isidentifier_PY3(ident):
+ if not isinstance(ident, string_types):
+ return False
+
+ # NOTE Python 3.7 offers str.isascii() so switch over to using it once
+ # we stop supporting 3.5 and 3.6 on the controller
+ try:
+ # Python 2 does not allow non-ascii characters in identifiers so unify
+ # the behavior for Python 3
+ ident.encode('ascii')
+ except UnicodeEncodeError:
+ return False
+
+ if not ident.isidentifier():
+ return False
+
+ if keyword.iskeyword(ident):
+ return False
+
+ return True
+
+
+def _isidentifier_PY2(ident):
+ if not isinstance(ident, string_types):
+ return False
+
+ if not ident:
+ return False
+
+ if C.INVALID_VARIABLE_NAMES.search(ident):
+ return False
+
+ if keyword.iskeyword(ident) or ident in ADDITIONAL_PY2_KEYWORDS:
+ return False
+
+ return True
+
+
+if PY3:
+ isidentifier = _isidentifier_PY3
+else:
+ isidentifier = _isidentifier_PY2
+
+
+isidentifier.__doc__ = """Determine if string is valid identifier.
+
+The purpose of this function is to be used to validate any variables created in
+a play to be valid Python identifiers and to not conflict with Python keywords
+to prevent unexpected behavior. Since Python 2 and Python 3 differ in what
+a valid identifier is, this function unifies the validation so playbooks are
+portable between the two. The following changes were made:
+
+ * disallow non-ascii characters (Python 3 allows for them as opposed to Python 2)
+ * True, False and None are reserved keywords (these are reserved keywords
+ on Python 3 as opposed to Python 2)
+
+:arg ident: A text string containing the identifier to check. Note: It is the
+    caller's responsibility to convert ident to text if it is not already.
+
+Originally posted at http://stackoverflow.com/a/29586366
+"""
diff --git a/lib/ansible/utils/version.py b/lib/ansible/utils/version.py
new file mode 100644
index 0000000..c045e7d
--- /dev/null
+++ b/lib/ansible/utils/version.py
@@ -0,0 +1,272 @@
+# Copyright (c) 2020 Matt Martz <matt@sivel.net>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import re
+
+from ansible.module_utils.compat.version import LooseVersion, Version
+
+from ansible.module_utils.six import text_type
+
+
+# Regular expression taken from
+# https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
+SEMVER_RE = re.compile(
+ r'''
+ ^
+ (?P<major>0|[1-9]\d*)
+ \.
+ (?P<minor>0|[1-9]\d*)
+ \.
+ (?P<patch>0|[1-9]\d*)
+ (?:
+ -
+ (?P<prerelease>
+ (?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)
+ (?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*
+ )
+ )?
+ (?:
+ \+
+ (?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*)
+ )?
+ $
+ ''',
+ flags=re.X
+)
+
+
+class _Alpha:
+ """Class to easily allow comparing strings
+
+    Largely this exists so that comparing an integer and a string on py3
+    works like it does on py2.
+ """
+ def __init__(self, specifier):
+ self.specifier = specifier
+
+ def __repr__(self):
+ return repr(self.specifier)
+
+ def __eq__(self, other):
+ if isinstance(other, _Alpha):
+ return self.specifier == other.specifier
+ elif isinstance(other, str):
+ return self.specifier == other
+
+ return False
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __lt__(self, other):
+ if isinstance(other, _Alpha):
+ return self.specifier < other.specifier
+ elif isinstance(other, str):
+ return self.specifier < other
+ elif isinstance(other, _Numeric):
+ return False
+
+ raise ValueError
+
+ def __le__(self, other):
+ return self.__lt__(other) or self.__eq__(other)
+
+ def __gt__(self, other):
+ return not self.__le__(other)
+
+ def __ge__(self, other):
+ return not self.__lt__(other)
+
+
+class _Numeric:
+ """Class to easily allow comparing numbers
+
+    Largely this exists so that comparing an integer and a string on py3
+    works like it does on py2.
+ """
+ def __init__(self, specifier):
+ self.specifier = int(specifier)
+
+ def __repr__(self):
+ return repr(self.specifier)
+
+ def __eq__(self, other):
+ if isinstance(other, _Numeric):
+ return self.specifier == other.specifier
+ elif isinstance(other, int):
+ return self.specifier == other
+
+ return False
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __lt__(self, other):
+ if isinstance(other, _Numeric):
+ return self.specifier < other.specifier
+ elif isinstance(other, int):
+ return self.specifier < other
+ elif isinstance(other, _Alpha):
+ return True
+
+ raise ValueError
+
+ def __le__(self, other):
+ return self.__lt__(other) or self.__eq__(other)
+
+ def __gt__(self, other):
+ return not self.__le__(other)
+
+ def __ge__(self, other):
+ return not self.__lt__(other)
+
+
+class SemanticVersion(Version):
+ """Version comparison class that implements Semantic Versioning 2.0.0
+
+    Based on ``distutils.version.Version``
+ """
+
+ version_re = SEMVER_RE
+
+ def __init__(self, vstring=None):
+ self.vstring = vstring
+ self.major = None
+ self.minor = None
+ self.patch = None
+ self.prerelease = ()
+ self.buildmetadata = ()
+
+ if vstring:
+ self.parse(vstring)
+
+ def __repr__(self):
+ return 'SemanticVersion(%r)' % self.vstring
+
+ @staticmethod
+ def from_loose_version(loose_version):
+ """This method is designed to take a ``LooseVersion``
+ and attempt to construct a ``SemanticVersion`` from it
+
+ This is useful where you want to do simple version math
+ without requiring users to provide a compliant semver.
+ """
+ if not isinstance(loose_version, LooseVersion):
+ raise ValueError("%r is not a LooseVersion" % loose_version)
+
+ try:
+ version = loose_version.version[:]
+ except AttributeError:
+ raise ValueError("%r is not a LooseVersion" % loose_version)
+
+ extra_idx = 3
+ for marker in ('-', '+'):
+ try:
+ idx = version.index(marker)
+ except ValueError:
+ continue
+ else:
+ if idx < extra_idx:
+ extra_idx = idx
+ version = version[:extra_idx]
+
+ if version and set(type(v) for v in version) != set((int,)):
+ raise ValueError("Non integer values in %r" % loose_version)
+
+ # Extra is everything to the right of the core version
+ extra = re.search('[+-].+$', loose_version.vstring)
+
+ version = version + [0] * (3 - len(version))
+ return SemanticVersion(
+ '%s%s' % (
+ '.'.join(str(v) for v in version),
+ extra.group(0) if extra else ''
+ )
+ )
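+
+    # A rough sketch (illustrative; assumes well-formed input):
+    #
+    #   SemanticVersion.from_loose_version(LooseVersion('2.10'))          # -> SemanticVersion('2.10.0')
+    #   SemanticVersion.from_loose_version(LooseVersion('1.2.3-beta.1'))  # -> SemanticVersion('1.2.3-beta.1')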
+
+ def parse(self, vstring):
+ match = SEMVER_RE.match(vstring)
+ if not match:
+ raise ValueError("invalid semantic version '%s'" % vstring)
+
+ (major, minor, patch, prerelease, buildmetadata) = match.group(1, 2, 3, 4, 5)
+ self.major = int(major)
+ self.minor = int(minor)
+ self.patch = int(patch)
+
+ if prerelease:
+ self.prerelease = tuple(_Numeric(x) if x.isdigit() else _Alpha(x) for x in prerelease.split('.'))
+ if buildmetadata:
+ self.buildmetadata = tuple(_Numeric(x) if x.isdigit() else _Alpha(x) for x in buildmetadata.split('.'))
+
+ @property
+ def core(self):
+ return self.major, self.minor, self.patch
+
+ @property
+ def is_prerelease(self):
+ return bool(self.prerelease)
+
+ @property
+ def is_stable(self):
+ # Major version zero (0.y.z) is for initial development. Anything MAY change at any time.
+ # The public API SHOULD NOT be considered stable.
+ # https://semver.org/#spec-item-4
+ return not (self.major == 0 or self.is_prerelease)
+
+ def _cmp(self, other):
+ if isinstance(other, str):
+ other = SemanticVersion(other)
+
+ if self.core != other.core:
+            # if the core version doesn't match,
+            # prerelease and buildmetadata don't matter
+ if self.core < other.core:
+ return -1
+ else:
+ return 1
+
+ if not any((self.prerelease, other.prerelease)):
+ return 0
+
+ if self.prerelease and not other.prerelease:
+ return -1
+ elif not self.prerelease and other.prerelease:
+ return 1
+ else:
+ if self.prerelease < other.prerelease:
+ return -1
+ elif self.prerelease > other.prerelease:
+ return 1
+
+ # Build metadata MUST be ignored when determining version precedence
+ # https://semver.org/#spec-item-10
+ # With the above in mind it is ignored here
+
+ # If we have made it here, things should be equal
+ return 0
+
+    # The Py2 and Py3 implementations of distutils.version.Version
+    # are quite different; defining these comparison methods makes the
+    # behaviour the same on both
+ def __eq__(self, other):
+ return self._cmp(other) == 0
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __lt__(self, other):
+ return self._cmp(other) < 0
+
+ def __le__(self, other):
+ return self._cmp(other) <= 0
+
+ def __gt__(self, other):
+ return self._cmp(other) > 0
+
+ def __ge__(self, other):
+ return self._cmp(other) >= 0
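+
+
+# Precedence follows SemVer 2.0.0, e.g. (illustrative):
+#
+#   SemanticVersion('1.0.0-alpha') < SemanticVersion('1.0.0')     # -> True
+#   SemanticVersion('1.0.0+build.1') == SemanticVersion('1.0.0')  # -> True (build metadata ignored)
+#   SemanticVersion('0.9.0').is_stable                            # -> False (major version zero)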