Diffstat (limited to 'lib/ansible/galaxy')
-rw-r--r--   lib/ansible/galaxy/__init__.py                                 2
-rw-r--r--   lib/ansible/galaxy/api.py                                     21
-rw-r--r--   lib/ansible/galaxy/collection/__init__.py                    192
-rw-r--r--   lib/ansible/galaxy/collection/concrete_artifact_manager.py   111
-rw-r--r--   lib/ansible/galaxy/collection/galaxy_api_proxy.py              2
-rw-r--r--   lib/ansible/galaxy/data/container/README.md                    8
-rw-r--r--   lib/ansible/galaxy/dependency_resolution/__init__.py           7
-rw-r--r--   lib/ansible/galaxy/dependency_resolution/dataclasses.py       66
-rw-r--r--   lib/ansible/galaxy/dependency_resolution/errors.py             2
-rw-r--r--   lib/ansible/galaxy/dependency_resolution/providers.py        134
-rw-r--r--   lib/ansible/galaxy/role.py                                    75
-rw-r--r--   lib/ansible/galaxy/token.py                                    4
12 files changed, 347 insertions(+), 277 deletions(-)
diff --git a/lib/ansible/galaxy/__init__.py b/lib/ansible/galaxy/__init__.py
index d3b9035..26d9f14 100644
--- a/lib/ansible/galaxy/__init__.py
+++ b/lib/ansible/galaxy/__init__.py
@@ -27,7 +27,7 @@ import os
import ansible.constants as C
from ansible import context
-from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common.text.converters import to_bytes
from ansible.module_utils.common.yaml import yaml_load
# default_readme_template
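
Throughout this change set, imports move off the deprecated `ansible.module_utils._text` shim onto its canonical home. A minimal sketch of the converters at the new path (behavior is unchanged; the example values are illustrative):

```python
# New canonical import path; ansible.module_utils._text remains only as a
# deprecated alias for these same functions.
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text

b_path = to_bytes('~/.ansible/collections', errors='surrogate_or_strict')  # bytes for os.* APIs
print(to_text(b_path))    # text for display/logging
print(to_native(b_path))  # str on Python 3
```
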
diff --git a/lib/ansible/galaxy/api.py b/lib/ansible/galaxy/api.py
index 0d51998..af7f162 100644
--- a/lib/ansible/galaxy/api.py
+++ b/lib/ansible/galaxy/api.py
@@ -11,7 +11,6 @@ import functools
import hashlib
import json
import os
-import socket
import stat
import tarfile
import time
@@ -28,7 +27,7 @@ from ansible.galaxy.user_agent import user_agent
from ansible.module_utils.api import retry_with_delays_and_condition
from ansible.module_utils.api import generate_jittered_backoff
from ansible.module_utils.six import string_types
-from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.urls import open_url, prepare_multipart
from ansible.utils.display import Display
from ansible.utils.hashing import secure_hash_s
@@ -66,7 +65,7 @@ def should_retry_error(exception):
# Handle common URL related errors such as TimeoutError, and BadStatusLine
# Note: socket.timeout is only required for Py3.9
- if isinstance(orig_exc, (TimeoutError, BadStatusLine, IncompleteRead, socket.timeout)):
+ if isinstance(orig_exc, (TimeoutError, BadStatusLine, IncompleteRead)):
return True
return False
@@ -360,7 +359,8 @@ class GalaxyAPI:
valid = False
if cache_key in server_cache:
expires = datetime.datetime.strptime(server_cache[cache_key]['expires'], iso_datetime_format)
- valid = datetime.datetime.utcnow() < expires
+ expires = expires.replace(tzinfo=datetime.timezone.utc)
+ valid = datetime.datetime.now(datetime.timezone.utc) < expires
is_paginated_url = 'page' in query or 'offset' in query
if valid and not is_paginated_url:
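
The cache-expiry change swaps naive `datetime.utcnow()` for timezone-aware datetimes. A sketch of the comparison, assuming `iso_datetime_format` elsewhere in this module is `'%Y-%m-%dT%H:%M:%SZ'`:

```python
import datetime

iso_datetime_format = '%Y-%m-%dT%H:%M:%SZ'  # assumed module-level format

# Cached timestamps are naive UTC strings; attach tzinfo after parsing so
# they can be compared against an aware now().
expires = datetime.datetime.strptime('2030-01-01T00:00:00Z', iso_datetime_format)
expires = expires.replace(tzinfo=datetime.timezone.utc)
valid = datetime.datetime.now(datetime.timezone.utc) < expires
print(valid)
```
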
@@ -385,7 +385,7 @@ class GalaxyAPI:
elif not is_paginated_url:
# The cache entry had expired or does not exist, start a new blank entry to be filled later.
- expires = datetime.datetime.utcnow()
+ expires = datetime.datetime.now(datetime.timezone.utc)
expires += datetime.timedelta(days=1)
server_cache[cache_key] = {
'expires': expires.strftime(iso_datetime_format),
@@ -483,8 +483,6 @@ class GalaxyAPI:
}
if role_name:
args['alternate_role_name'] = role_name
- elif github_repo.startswith('ansible-role'):
- args['alternate_role_name'] = github_repo[len('ansible-role') + 1:]
data = self._call_galaxy(url, args=urlencode(args), method="POST")
if data.get('results', None):
return data['results']
@@ -923,10 +921,7 @@ class GalaxyAPI:
data = self._call_galaxy(n_collection_url, error_context_msg=error_context_msg, cache=True)
self._set_cache()
- try:
- signatures = data["signatures"]
- except KeyError:
+ signatures = [signature_info["signature"] for signature_info in data.get("signatures") or []]
+ if not signatures:
display.vvvv(f"Server {self.api_server} has not signed {namespace}.{name}:{version}")
- return []
- else:
- return [signature_info["signature"] for signature_info in signatures]
+ return signatures
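
The rewritten signatures lookup collapses the try/except/else into a single comprehension; `data.get("signatures") or []` covers both a missing key and an explicit `null`. A self-contained sketch with a hypothetical payload:

```python
# Hypothetical decoded JSON for a collection version.
data = {"signatures": [{"signature": "-----BEGIN PGP SIGNATURE-----\n..."}]}

signatures = [signature_info["signature"] for signature_info in data.get("signatures") or []]
if not signatures:
    print("server has not signed this version")
print(signatures)
```
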
diff --git a/lib/ansible/galaxy/collection/__init__.py b/lib/ansible/galaxy/collection/__init__.py
index 84444d8..60c9c94 100644
--- a/lib/ansible/galaxy/collection/__init__.py
+++ b/lib/ansible/galaxy/collection/__init__.py
@@ -11,6 +11,7 @@ import fnmatch
import functools
import json
import os
+import pathlib
import queue
import re
import shutil
@@ -83,6 +84,7 @@ if t.TYPE_CHECKING:
FilesManifestType = t.Dict[t.Literal['files', 'format'], t.Union[t.List[FileManifestEntryType], int]]
import ansible.constants as C
+from ansible.compat.importlib_resources import files
from ansible.errors import AnsibleError
from ansible.galaxy.api import GalaxyAPI
from ansible.galaxy.collection.concrete_artifact_manager import (
@@ -122,8 +124,7 @@ from ansible.galaxy.dependency_resolution.dataclasses import (
)
from ansible.galaxy.dependency_resolution.versioning import meets_requirements
from ansible.plugins.loader import get_all_plugin_loaders
-from ansible.module_utils.six import raise_from
-from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.collections import is_sequence
from ansible.module_utils.common.yaml import yaml_dump
from ansible.utils.collection_loader import AnsibleCollectionRef
@@ -282,11 +283,8 @@ def verify_local_collection(local_collection, remote_collection, artifacts_manag
manifest_hash = get_hash_from_validation_source(MANIFEST_FILENAME)
else:
# fetch remote
- b_temp_tar_path = ( # NOTE: AnsibleError is raised on URLError
- artifacts_manager.get_artifact_path
- if remote_collection.is_concrete_artifact
- else artifacts_manager.get_galaxy_artifact_path
- )(remote_collection)
+ # NOTE: AnsibleError is raised on URLError
+ b_temp_tar_path = artifacts_manager.get_artifact_path_from_unknown(remote_collection)
display.vvv(
u"Remote collection cached as '{path!s}'".format(path=to_text(b_temp_tar_path))
@@ -470,7 +468,7 @@ def build_collection(u_collection_path, u_output_path, force):
try:
collection_meta = _get_meta_from_src_dir(b_collection_path)
except LookupError as lookup_err:
- raise_from(AnsibleError(to_native(lookup_err)), lookup_err)
+ raise AnsibleError(to_native(lookup_err)) from lookup_err
collection_manifest = _build_manifest(**collection_meta)
file_manifest = _build_files_manifest(
@@ -479,6 +477,7 @@ def build_collection(u_collection_path, u_output_path, force):
collection_meta['name'], # type: ignore[arg-type]
collection_meta['build_ignore'], # type: ignore[arg-type]
collection_meta['manifest'], # type: ignore[arg-type]
+ collection_meta['license_file'], # type: ignore[arg-type]
)
artifact_tarball_file_name = '{ns!s}-{name!s}-{ver!s}.tar.gz'.format(
@@ -545,7 +544,7 @@ def download_collections(
for fqcn, concrete_coll_pin in dep_map.copy().items(): # FIXME: move into the provider
if concrete_coll_pin.is_virtual:
display.display(
- '{coll!s} is not downloadable'.
+ 'Virtual collection {coll!s} is not downloadable'.
format(coll=to_text(concrete_coll_pin)),
)
continue
@@ -555,11 +554,7 @@ def download_collections(
format(coll=to_text(concrete_coll_pin), path=to_text(b_output_path)),
)
- b_src_path = (
- artifacts_manager.get_artifact_path
- if concrete_coll_pin.is_concrete_artifact
- else artifacts_manager.get_galaxy_artifact_path
- )(concrete_coll_pin)
+ b_src_path = artifacts_manager.get_artifact_path_from_unknown(concrete_coll_pin)
b_dest_path = os.path.join(
b_output_path,
@@ -659,6 +654,7 @@ def install_collections(
artifacts_manager, # type: ConcreteArtifactsManager
disable_gpg_verify, # type: bool
offline, # type: bool
+ read_requirement_paths, # type: set[str]
): # type: (...) -> None
"""Install Ansible collections to the path specified.
@@ -673,13 +669,14 @@ def install_collections(
"""
existing_collections = {
Requirement(coll.fqcn, coll.ver, coll.src, coll.type, None)
- for coll in find_existing_collections(output_path, artifacts_manager)
+ for path in {output_path} | read_requirement_paths
+ for coll in find_existing_collections(path, artifacts_manager)
}
unsatisfied_requirements = set(
chain.from_iterable(
(
- Requirement.from_dir_path(sub_coll, artifacts_manager)
+ Requirement.from_dir_path(to_bytes(sub_coll), artifacts_manager)
for sub_coll in (
artifacts_manager.
get_direct_collection_dependencies(install_req).
@@ -744,7 +741,7 @@ def install_collections(
for fqcn, concrete_coll_pin in dependency_map.items():
if concrete_coll_pin.is_virtual:
display.vvvv(
- "Encountered {coll!s}, skipping.".
+ "'{coll!s}' is virtual, skipping.".
format(coll=to_text(concrete_coll_pin)),
)
continue
@@ -1065,8 +1062,9 @@ def _make_entry(name, ftype, chksum_type='sha256', chksum=None):
}
-def _build_files_manifest(b_collection_path, namespace, name, ignore_patterns, manifest_control):
- # type: (bytes, str, str, list[str], dict[str, t.Any]) -> FilesManifestType
+def _build_files_manifest(b_collection_path, namespace, name, ignore_patterns,
+ manifest_control, license_file):
+ # type: (bytes, str, str, list[str], dict[str, t.Any], t.Optional[str]) -> FilesManifestType
if ignore_patterns and manifest_control is not Sentinel:
raise AnsibleError('"build_ignore" and "manifest" are mutually exclusive')
@@ -1076,14 +1074,15 @@ def _build_files_manifest(b_collection_path, namespace, name, ignore_patterns, m
namespace,
name,
manifest_control,
+ license_file,
)
return _build_files_manifest_walk(b_collection_path, namespace, name, ignore_patterns)
-def _build_files_manifest_distlib(b_collection_path, namespace, name, manifest_control):
- # type: (bytes, str, str, dict[str, t.Any]) -> FilesManifestType
-
+def _build_files_manifest_distlib(b_collection_path, namespace, name, manifest_control,
+ license_file):
+ # type: (bytes, str, str, dict[str, t.Any], t.Optional[str]) -> FilesManifestType
if not HAS_DISTLIB:
raise AnsibleError('Use of "manifest" requires the python "distlib" library')
@@ -1116,15 +1115,20 @@ def _build_files_manifest_distlib(b_collection_path, namespace, name, manifest_c
else:
directives.extend([
'include meta/*.yml',
- 'include *.txt *.md *.rst COPYING LICENSE',
+ 'include *.txt *.md *.rst *.license COPYING LICENSE',
+ 'recursive-include .reuse **',
+ 'recursive-include LICENSES **',
'recursive-include tests **',
- 'recursive-include docs **.rst **.yml **.yaml **.json **.j2 **.txt',
- 'recursive-include roles **.yml **.yaml **.json **.j2',
- 'recursive-include playbooks **.yml **.yaml **.json',
- 'recursive-include changelogs **.yml **.yaml',
- 'recursive-include plugins */**.py',
+ 'recursive-include docs **.rst **.yml **.yaml **.json **.j2 **.txt **.license',
+ 'recursive-include roles **.yml **.yaml **.json **.j2 **.license',
+ 'recursive-include playbooks **.yml **.yaml **.json **.license',
+ 'recursive-include changelogs **.yml **.yaml **.license',
+ 'recursive-include plugins */**.py */**.license',
])
+ if license_file:
+ directives.append(f'include {license_file}')
+
plugins = set(l.package.split('.')[-1] for d, l in get_all_plugin_loaders())
for plugin in sorted(plugins):
if plugin in ('modules', 'module_utils'):
@@ -1135,8 +1139,8 @@ def _build_files_manifest_distlib(b_collection_path, namespace, name, manifest_c
)
directives.extend([
- 'recursive-include plugins/modules **.ps1 **.yml **.yaml',
- 'recursive-include plugins/module_utils **.ps1 **.psm1 **.cs',
+ 'recursive-include plugins/modules **.ps1 **.yml **.yaml **.license',
+ 'recursive-include plugins/module_utils **.ps1 **.psm1 **.cs **.license',
])
directives.extend(control.directives)
@@ -1144,7 +1148,7 @@ def _build_files_manifest_distlib(b_collection_path, namespace, name, manifest_c
directives.extend([
f'exclude galaxy.yml galaxy.yaml MANIFEST.json FILES.json {namespace}-{name}-*.tar.gz',
'recursive-exclude tests/output **',
- 'global-exclude /.* /__pycache__',
+ 'global-exclude /.* /__pycache__ *.pyc *.pyo *.bak *~ *.swp',
])
display.vvv('Manifest Directives:')
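
The directive list above feeds distlib's manifest machinery: `_build_files_manifest_distlib()` indexes the tree once, then applies each directive in order. A trimmed sketch of that flow (the path and directive subset are illustrative):

```python
from distlib.manifest import Manifest

manifest = Manifest('/path/to/collection')  # hypothetical collection root
manifest.findall()                          # index every file under the root
for directive in (
    'include meta/*.yml',
    'include *.txt *.md *.rst *.license COPYING LICENSE',
    'recursive-include LICENSES **',
    'global-exclude /.* /__pycache__ *.pyc *.pyo *.bak *~ *.swp',
):
    manifest.process_directive(directive)
print(sorted(manifest.files))  # candidates for FILES.json
```
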
@@ -1321,6 +1325,8 @@ def _build_collection_tar(
if os.path.islink(b_src_path):
b_link_target = os.path.realpath(b_src_path)
+ if not os.path.exists(b_link_target):
+ raise AnsibleError(f"Failed to find the target path '{to_native(b_link_target)}' for the symlink '{to_native(b_src_path)}'.")
if _is_child_path(b_link_target, b_collection_path):
b_rel_path = os.path.relpath(b_link_target, start=os.path.dirname(b_src_path))
@@ -1375,51 +1381,101 @@ def _build_collection_dir(b_collection_path, b_collection_output, collection_man
src_file = os.path.join(b_collection_path, to_bytes(file_info['name'], errors='surrogate_or_strict'))
dest_file = os.path.join(b_collection_output, to_bytes(file_info['name'], errors='surrogate_or_strict'))
- existing_is_exec = os.stat(src_file).st_mode & stat.S_IXUSR
+ existing_is_exec = os.stat(src_file, follow_symlinks=False).st_mode & stat.S_IXUSR
mode = 0o0755 if existing_is_exec else 0o0644
- if os.path.isdir(src_file):
+ # ensure symlinks to dirs are not translated to empty dirs
+ if os.path.isdir(src_file) and not os.path.islink(src_file):
mode = 0o0755
base_directories.append(src_file)
os.mkdir(dest_file, mode)
else:
- shutil.copyfile(src_file, dest_file)
+ # do not follow symlinks to ensure the original link is used
+ shutil.copyfile(src_file, dest_file, follow_symlinks=False)
+
+ # avoid setting specific permissions on symlinks, since doing so does not
+ # support avoiding symlink traversal and will throw an exception if the
+ # symlink target does not exist
+ if not os.path.islink(dest_file):
+ os.chmod(dest_file, mode)
- os.chmod(dest_file, mode)
collection_output = to_text(b_collection_output)
return collection_output
-def find_existing_collections(path, artifacts_manager):
+def _normalize_collection_path(path):
+ str_path = path.as_posix() if isinstance(path, pathlib.Path) else path
+ return pathlib.Path(
+ # This is annoying, but GalaxyCLI._resolve_path did it
+ os.path.expandvars(str_path)
+ ).expanduser().absolute()
+
+
+def find_existing_collections(path_filter, artifacts_manager, namespace_filter=None, collection_filter=None, dedupe=True):
"""Locate all collections under a given path.
:param path: Collection dirs layout search path.
:param artifacts_manager: Artifacts manager.
"""
- b_path = to_bytes(path, errors='surrogate_or_strict')
+ if files is None:
+ raise AnsibleError('importlib_resources is not installed and is required')
+
+ if path_filter and not is_sequence(path_filter):
+ path_filter = [path_filter]
+ if namespace_filter and not is_sequence(namespace_filter):
+ namespace_filter = [namespace_filter]
+ if collection_filter and not is_sequence(collection_filter):
+ collection_filter = [collection_filter]
+
+ paths = set()
+ for path in files('ansible_collections').glob('*/*/'):
+ path = _normalize_collection_path(path)
+ if not path.is_dir():
+ continue
+ if path_filter:
+ for pf in path_filter:
+ try:
+ path.relative_to(_normalize_collection_path(pf))
+ except ValueError:
+ continue
+ break
+ else:
+ continue
+ paths.add(path)
- # FIXME: consider using `glob.glob()` to simplify looping
- for b_namespace in os.listdir(b_path):
- b_namespace_path = os.path.join(b_path, b_namespace)
- if os.path.isfile(b_namespace_path):
+ seen = set()
+ for path in paths:
+ namespace = path.parent.name
+ name = path.name
+ if namespace_filter and namespace not in namespace_filter:
+ continue
+ if collection_filter and name not in collection_filter:
continue
- # FIXME: consider feeding b_namespace_path to Candidate.from_dir_path to get subdirs automatically
- for b_collection in os.listdir(b_namespace_path):
- b_collection_path = os.path.join(b_namespace_path, b_collection)
- if not os.path.isdir(b_collection_path):
+ if dedupe:
+ try:
+ collection_path = files(f'ansible_collections.{namespace}.{name}')
+ except ImportError:
continue
+ if collection_path in seen:
+ continue
+ seen.add(collection_path)
+ else:
+ collection_path = path
- try:
- req = Candidate.from_dir_path_as_unknown(b_collection_path, artifacts_manager)
- except ValueError as val_err:
- raise_from(AnsibleError(val_err), val_err)
+ b_collection_path = to_bytes(collection_path.as_posix())
- display.vvv(
- u"Found installed collection {coll!s} at '{path!s}'".
- format(coll=to_text(req), path=to_text(req.src))
- )
- yield req
+ try:
+ req = Candidate.from_dir_path_as_unknown(b_collection_path, artifacts_manager)
+ except ValueError as val_err:
+ display.warning(f'{val_err}')
+ continue
+
+ display.vvv(
+ u"Found installed collection {coll!s} at '{path!s}'".
+ format(coll=to_text(req), path=to_text(req.src))
+ )
+ yield req
def install(collection, path, artifacts_manager): # FIXME: mv to dataclasses?
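
`find_existing_collections()` now discovers installed collections via importlib resources instead of hand-rolled `os.listdir()` loops, and every candidate path goes through the new `_normalize_collection_path()` so config-, CLI-, and loader-derived paths compare equal. The helper in isolation:

```python
import os
import pathlib

def _normalize_collection_path(path):
    # Expand env vars and '~' (mirroring GalaxyCLI._resolve_path), then
    # absolutize, so equivalent spellings of a path dedupe correctly.
    str_path = path.as_posix() if isinstance(path, pathlib.Path) else path
    return pathlib.Path(os.path.expandvars(str_path)).expanduser().absolute()

print(_normalize_collection_path('$HOME/.ansible/collections'))
print(_normalize_collection_path(pathlib.Path('~/.ansible/collections')))
```
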
@@ -1430,10 +1486,7 @@ def install(collection, path, artifacts_manager): # FIXME: mv to dataclasses?
:param path: Collection dirs layout path.
:param artifacts_manager: Artifacts manager.
"""
- b_artifact_path = (
- artifacts_manager.get_artifact_path if collection.is_concrete_artifact
- else artifacts_manager.get_galaxy_artifact_path
- )(collection)
+ b_artifact_path = artifacts_manager.get_artifact_path_from_unknown(collection)
collection_path = os.path.join(path, collection.namespace, collection.name)
b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
@@ -1587,6 +1640,7 @@ def install_src(collection, b_collection_path, b_collection_output_path, artifac
collection_meta['namespace'], collection_meta['name'],
collection_meta['build_ignore'],
collection_meta['manifest'],
+ collection_meta['license_file'],
)
collection_output_path = _build_collection_dir(
@@ -1763,10 +1817,15 @@ def _resolve_depenency_map(
elif not req.specifier.contains(RESOLVELIB_VERSION.vstring):
raise AnsibleError(f"ansible-galaxy requires {req.name}{req.specifier}")
+ pre_release_hint = '' if allow_pre_release else (
+ 'Hint: Pre-releases hosted on Galaxy or Automation Hub are not '
+ 'installed by default unless a specific version is requested. '
+ 'To enable pre-releases globally, use --pre.'
+ )
+
collection_dep_resolver = build_collection_dependency_resolver(
galaxy_apis=galaxy_apis,
concrete_artifacts_manager=concrete_artifacts_manager,
- user_requirements=requested_requirements,
preferred_candidates=preferred_candidates,
with_deps=not no_deps,
with_pre_releases=allow_pre_release,
@@ -1798,13 +1857,12 @@ def _resolve_depenency_map(
),
conflict_causes,
))
- raise raise_from( # NOTE: Leading "raise" is a hack for mypy bug #9717
- AnsibleError('\n'.join(error_msg_lines)),
- dep_exc,
- )
+ error_msg_lines.append(pre_release_hint)
+ raise AnsibleError('\n'.join(error_msg_lines)) from dep_exc
except CollectionDependencyInconsistentCandidate as dep_exc:
parents = [
- str(p) for p in dep_exc.criterion.iter_parent()
+ "%s.%s:%s" % (p.namespace, p.name, p.ver)
+ for p in dep_exc.criterion.iter_parent()
if p is not None
]
@@ -1826,10 +1884,8 @@ def _resolve_depenency_map(
error_msg_lines.append(
'* {req.fqcn!s}:{req.ver!s}'.format(req=req)
)
+ error_msg_lines.append(pre_release_hint)
- raise raise_from( # NOTE: Leading "raise" is a hack for mypy bug #9717
- AnsibleError('\n'.join(error_msg_lines)),
- dep_exc,
- )
+ raise AnsibleError('\n'.join(error_msg_lines)) from dep_exc
except ValueError as exc:
raise AnsibleError(to_native(exc)) from exc
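
A recurring change in this and the following files replaces six's `raise_from()` helper (and the `raise raise_from(...)` mypy workaround) with native `raise ... from ...`, which chains `__cause__` the same way without the helper. A minimal sketch of the pattern:

```python
def load_meta():
    try:
        raise LookupError('galaxy.yml not found')  # stand-in failure
    except LookupError as lookup_err:
        # Equivalent of the removed raise_from(AnsibleError(...), lookup_err):
        # "from" sets __cause__ so the traceback shows both exceptions.
        raise RuntimeError(str(lookup_err)) from lookup_err

try:
    load_meta()
except RuntimeError as err:
    print(err, '| caused by:', repr(err.__cause__))
```
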
diff --git a/lib/ansible/galaxy/collection/concrete_artifact_manager.py b/lib/ansible/galaxy/collection/concrete_artifact_manager.py
index 67d8e43..d251127 100644
--- a/lib/ansible/galaxy/collection/concrete_artifact_manager.py
+++ b/lib/ansible/galaxy/collection/concrete_artifact_manager.py
@@ -21,7 +21,7 @@ from tempfile import mkdtemp
if t.TYPE_CHECKING:
from ansible.galaxy.dependency_resolution.dataclasses import (
- Candidate, Requirement,
+ Candidate, Collection, Requirement,
)
from ansible.galaxy.token import GalaxyToken
@@ -30,13 +30,11 @@ from ansible.galaxy import get_collections_galaxy_meta_info
from ansible.galaxy.api import should_retry_error
from ansible.galaxy.dependency_resolution.dataclasses import _GALAXY_YAML
from ansible.galaxy.user_agent import user_agent
-from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.api import retry_with_delays_and_condition
from ansible.module_utils.api import generate_jittered_backoff
from ansible.module_utils.common.process import get_bin_path
-from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.module_utils.common.yaml import yaml_load
-from ansible.module_utils.six import raise_from
from ansible.module_utils.urls import open_url
from ansible.utils.display import Display
from ansible.utils.sentinel import Sentinel
@@ -141,13 +139,10 @@ class ConcreteArtifactsManager:
try:
url, sha256_hash, token = self._galaxy_collection_cache[collection]
except KeyError as key_err:
- raise_from(
- RuntimeError(
- 'The is no known source for {coll!s}'.
- format(coll=collection),
- ),
- key_err,
- )
+ raise RuntimeError(
+ 'There is no known source for {coll!s}'.
+ format(coll=collection),
+ ) from key_err
display.vvvv(
"Fetching a collection tarball for '{collection!s}' from "
@@ -195,7 +190,7 @@ class ConcreteArtifactsManager:
return b_artifact_path
def get_artifact_path(self, collection):
- # type: (t.Union[Candidate, Requirement]) -> bytes
+ # type: (Collection) -> bytes
"""Given a concrete collection pointer, return a cached path.
If it's not yet on disk, this method downloads the artifact first.
@@ -230,17 +225,14 @@ class ConcreteArtifactsManager:
timeout=self.timeout
)
except Exception as err:
- raise_from(
- AnsibleError(
- 'Failed to download collection tar '
- "from '{coll_src!s}': {download_err!s}".
- format(
- coll_src=to_native(collection.src),
- download_err=to_native(err),
- ),
+ raise AnsibleError(
+ 'Failed to download collection tar '
+ "from '{coll_src!s}': {download_err!s}".
+ format(
+ coll_src=to_native(collection.src),
+ download_err=to_native(err),
),
- err,
- )
+ ) from err
elif collection.is_scm:
b_artifact_path = _extract_collection_from_git(
collection.src,
@@ -259,16 +251,22 @@ class ConcreteArtifactsManager:
self._artifact_cache[collection.src] = b_artifact_path
return b_artifact_path
+ def get_artifact_path_from_unknown(self, collection):
+ # type: (Candidate) -> bytes
+ if collection.is_concrete_artifact:
+ return self.get_artifact_path(collection)
+ return self.get_galaxy_artifact_path(collection)
+
def _get_direct_collection_namespace(self, collection):
# type: (Candidate) -> t.Optional[str]
return self.get_direct_collection_meta(collection)['namespace'] # type: ignore[return-value]
def _get_direct_collection_name(self, collection):
- # type: (Candidate) -> t.Optional[str]
+ # type: (Collection) -> t.Optional[str]
return self.get_direct_collection_meta(collection)['name'] # type: ignore[return-value]
def get_direct_collection_fqcn(self, collection):
- # type: (Candidate) -> t.Optional[str]
+ # type: (Collection) -> t.Optional[str]
"""Extract FQCN from the given on-disk collection artifact.
If the collection is virtual, ``None`` is returned instead
@@ -284,7 +282,7 @@ class ConcreteArtifactsManager:
))
def get_direct_collection_version(self, collection):
- # type: (t.Union[Candidate, Requirement]) -> str
+ # type: (Collection) -> str
"""Extract version from the given on-disk collection artifact."""
return self.get_direct_collection_meta(collection)['version'] # type: ignore[return-value]
@@ -297,7 +295,7 @@ class ConcreteArtifactsManager:
return collection_dependencies # type: ignore[return-value]
def get_direct_collection_meta(self, collection):
- # type: (t.Union[Candidate, Requirement]) -> dict[str, t.Union[str, dict[str, str], list[str], None, t.Type[Sentinel]]]
+ # type: (Collection) -> dict[str, t.Union[str, dict[str, str], list[str], None, t.Type[Sentinel]]]
"""Extract meta from the given on-disk collection artifact."""
try: # FIXME: use unique collection identifier as a cache key?
return self._artifact_meta_cache[collection.src]
@@ -311,13 +309,10 @@ class ConcreteArtifactsManager:
try:
collection_meta = _get_meta_from_dir(b_artifact_path, self.require_build_metadata)
except LookupError as lookup_err:
- raise_from(
- AnsibleError(
- 'Failed to find the collection dir deps: {err!s}'.
- format(err=to_native(lookup_err)),
- ),
- lookup_err,
- )
+ raise AnsibleError(
+ 'Failed to find the collection dir deps: {err!s}'.
+ format(err=to_native(lookup_err)),
+ ) from lookup_err
elif collection.is_scm:
collection_meta = {
'name': None,
@@ -439,29 +434,23 @@ def _extract_collection_from_git(repo_url, coll_ver, b_path):
try:
subprocess.check_call(git_clone_cmd)
except subprocess.CalledProcessError as proc_err:
- raise_from(
- AnsibleError( # should probably be LookupError
- 'Failed to clone a Git repository from `{repo_url!s}`.'.
- format(repo_url=to_native(git_url)),
- ),
- proc_err,
- )
+ raise AnsibleError( # should probably be LookupError
+ 'Failed to clone a Git repository from `{repo_url!s}`.'.
+ format(repo_url=to_native(git_url)),
+ ) from proc_err
git_switch_cmd = git_executable, 'checkout', to_text(version)
try:
subprocess.check_call(git_switch_cmd, cwd=b_checkout_path)
except subprocess.CalledProcessError as proc_err:
- raise_from(
- AnsibleError( # should probably be LookupError
- 'Failed to switch a cloned Git repo `{repo_url!s}` '
- 'to the requested revision `{commitish!s}`.'.
- format(
- commitish=to_native(version),
- repo_url=to_native(git_url),
- ),
+ raise AnsibleError( # should probably be LookupError
+ 'Failed to switch a cloned Git repo `{repo_url!s}` '
+ 'to the requested revision `{commitish!s}`.'.
+ format(
+ commitish=to_native(version),
+ repo_url=to_native(git_url),
),
- proc_err,
- )
+ ) from proc_err
return (
os.path.join(b_checkout_path, to_bytes(fragment))
@@ -637,17 +626,14 @@ def _get_meta_from_src_dir(
try:
manifest = yaml_load(manifest_file_obj)
except yaml.error.YAMLError as yaml_err:
- raise_from(
- AnsibleError(
- "Failed to parse the galaxy.yml at '{path!s}' with "
- 'the following error:\n{err_txt!s}'.
- format(
- path=to_native(galaxy_yml),
- err_txt=to_native(yaml_err),
- ),
+ raise AnsibleError(
+ "Failed to parse the galaxy.yml at '{path!s}' with "
+ 'the following error:\n{err_txt!s}'.
+ format(
+ path=to_native(galaxy_yml),
+ err_txt=to_native(yaml_err),
),
- yaml_err,
- )
+ ) from yaml_err
if not isinstance(manifest, dict):
if require_build_metadata:
@@ -716,6 +702,11 @@ def _get_meta_from_installed_dir(
def _get_meta_from_tar(
b_path, # type: bytes
): # type: (...) -> dict[str, t.Union[str, list[str], dict[str, str], None, t.Type[Sentinel]]]
+ if not os.path.exists(b_path):
+ raise AnsibleError(
+ f"Unable to find collection artifact file at '{to_native(b_path)}'."
+ )
+
if not tarfile.is_tarfile(b_path):
raise AnsibleError(
"Collection artifact at '{path!s}' is not a valid tar file.".
diff --git a/lib/ansible/galaxy/collection/galaxy_api_proxy.py b/lib/ansible/galaxy/collection/galaxy_api_proxy.py
index 51e0c9f..64d545f 100644
--- a/lib/ansible/galaxy/collection/galaxy_api_proxy.py
+++ b/lib/ansible/galaxy/collection/galaxy_api_proxy.py
@@ -18,7 +18,7 @@ if t.TYPE_CHECKING:
)
from ansible.galaxy.api import GalaxyAPI, GalaxyError
-from ansible.module_utils._text import to_text
+from ansible.module_utils.common.text.converters import to_text
from ansible.utils.display import Display
diff --git a/lib/ansible/galaxy/data/container/README.md b/lib/ansible/galaxy/data/container/README.md
index 1b66bdb..f9b791e 100644
--- a/lib/ansible/galaxy/data/container/README.md
+++ b/lib/ansible/galaxy/data/container/README.md
@@ -3,7 +3,7 @@
Adds a <SERVICE_NAME> service to your [Ansible Container](https://github.com/ansible/ansible-container) project. Run the following commands
to install the service:
-```
+```shell
# Set the working directory to your Ansible Container project root
$ cd myproject
@@ -15,7 +15,8 @@ $ ansible-container install <USERNAME.ROLE_NAME>
- [Ansible Container](https://github.com/ansible/ansible-container)
- An existing Ansible Container project. To create a project, simply run the following:
- ```
+
+ ```shell
# Create an empty project directory
$ mkdir myproject
@@ -28,7 +29,6 @@ $ ansible-container install <USERNAME.ROLE_NAME>
- Continue listing any prerequisites here...
-
## Role Variables
A description of the settable variables for this role should go here, including any variables that are in defaults/main.yml, vars/main.yml, and any variables that can/should be set
@@ -45,5 +45,3 @@ BSD
## Author Information
An optional section for the role authors to include contact information, or a website (HTML is not allowed).
-
-
diff --git a/lib/ansible/galaxy/dependency_resolution/__init__.py b/lib/ansible/galaxy/dependency_resolution/__init__.py
index cfde7df..eeffd29 100644
--- a/lib/ansible/galaxy/dependency_resolution/__init__.py
+++ b/lib/ansible/galaxy/dependency_resolution/__init__.py
@@ -13,10 +13,7 @@ if t.TYPE_CHECKING:
from ansible.galaxy.collection.concrete_artifact_manager import (
ConcreteArtifactsManager,
)
- from ansible.galaxy.dependency_resolution.dataclasses import (
- Candidate,
- Requirement,
- )
+ from ansible.galaxy.dependency_resolution.dataclasses import Candidate
from ansible.galaxy.collection.galaxy_api_proxy import MultiGalaxyAPIProxy
from ansible.galaxy.dependency_resolution.providers import CollectionDependencyProvider
@@ -27,7 +24,6 @@ from ansible.galaxy.dependency_resolution.resolvers import CollectionDependencyR
def build_collection_dependency_resolver(
galaxy_apis, # type: t.Iterable[GalaxyAPI]
concrete_artifacts_manager, # type: ConcreteArtifactsManager
- user_requirements, # type: t.Iterable[Requirement]
preferred_candidates=None, # type: t.Iterable[Candidate]
with_deps=True, # type: bool
with_pre_releases=False, # type: bool
@@ -44,7 +40,6 @@ def build_collection_dependency_resolver(
CollectionDependencyProvider(
apis=MultiGalaxyAPIProxy(galaxy_apis, concrete_artifacts_manager, offline=offline),
concrete_artifacts_manager=concrete_artifacts_manager,
- user_requirements=user_requirements,
preferred_candidates=preferred_candidates,
with_deps=with_deps,
with_pre_releases=with_pre_releases,
diff --git a/lib/ansible/galaxy/dependency_resolution/dataclasses.py b/lib/ansible/galaxy/dependency_resolution/dataclasses.py
index 35b6505..7e8fb57 100644
--- a/lib/ansible/galaxy/dependency_resolution/dataclasses.py
+++ b/lib/ansible/galaxy/dependency_resolution/dataclasses.py
@@ -29,7 +29,8 @@ if t.TYPE_CHECKING:
from ansible.errors import AnsibleError, AnsibleAssertionError
from ansible.galaxy.api import GalaxyAPI
-from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.galaxy.collection import HAS_PACKAGING, PkgReq
+from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.arg_spec import ArgumentSpecValidator
from ansible.utils.collection_loader import AnsibleCollectionRef
from ansible.utils.display import Display
@@ -215,10 +216,15 @@ class _ComputedReqKindsMixin:
return cls.from_dir_path_implicit(dir_path)
@classmethod
- def from_dir_path(cls, dir_path, art_mgr):
+ def from_dir_path( # type: ignore[misc]
+ cls, # type: t.Type[Collection]
+ dir_path, # type: bytes
+ art_mgr, # type: ConcreteArtifactsManager
+ ): # type: (...) -> Collection
"""Make collection from an directory with metadata."""
- b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict')
- if not _is_collection_dir(b_dir_path):
+ if dir_path.endswith(to_bytes(os.path.sep)):
+ dir_path = dir_path.rstrip(to_bytes(os.path.sep))
+ if not _is_collection_dir(dir_path):
display.warning(
u"Collection at '{path!s}' does not have a {manifest_json!s} "
u'file, nor has it {galaxy_yml!s}: cannot detect version.'.
@@ -267,6 +273,8 @@ class _ComputedReqKindsMixin:
regardless of whether any of known metadata files are present.
"""
# There is no metadata, but it isn't required for a functional collection. Determine the namespace.name from the path.
+ if dir_path.endswith(to_bytes(os.path.sep)):
+ dir_path = dir_path.rstrip(to_bytes(os.path.sep))
u_dir_path = to_text(dir_path, errors='surrogate_or_strict')
path_list = u_dir_path.split(os.path.sep)
req_name = '.'.join(path_list[-2:])
@@ -275,13 +283,25 @@ class _ComputedReqKindsMixin:
@classmethod
def from_string(cls, collection_input, artifacts_manager, supplemental_signatures):
req = {}
- if _is_concrete_artifact_pointer(collection_input):
- # Arg is a file path or URL to a collection
+ if _is_concrete_artifact_pointer(collection_input) or AnsibleCollectionRef.is_valid_collection_name(collection_input):
+ # Arg is a file path or URL to a collection, or just a collection
req['name'] = collection_input
- else:
+ elif ':' in collection_input:
req['name'], _sep, req['version'] = collection_input.partition(':')
if not req['version']:
del req['version']
+ else:
+ if not HAS_PACKAGING:
+ raise AnsibleError("Failed to import packaging, check that a supported version is installed")
+ try:
+ pkg_req = PkgReq(collection_input)
+ except Exception as e:
+ # packaging doesn't know what this is, let it fly, better errors happen in from_requirement_dict
+ req['name'] = collection_input
+ else:
+ req['name'] = pkg_req.name
+ if pkg_req.specifier:
+ req['version'] = to_text(pkg_req.specifier)
req['signatures'] = supplemental_signatures
return cls.from_requirement_dict(req, artifacts_manager)
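
`PkgReq` is ansible's alias for `packaging.requirements.Requirement` (see the `HAS_PACKAGING, PkgReq` import above); it handles input that is neither a concrete artifact, a bare collection name, nor `name:version`. A sketch of the parse:

```python
from packaging.requirements import Requirement as PkgReq

pkg_req = PkgReq('ns.coll>=1.0.0,<2.0.0')
req = {'name': pkg_req.name}
if pkg_req.specifier:
    req['version'] = str(pkg_req.specifier)
print(req)  # e.g. {'name': 'ns.coll', 'version': '<2.0.0,>=1.0.0'}
```
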
@@ -414,6 +434,9 @@ class _ComputedReqKindsMixin:
format(not_url=req_source.api_server),
)
+ if req_type == 'dir' and req_source.endswith(os.path.sep):
+ req_source = req_source.rstrip(os.path.sep)
+
tmp_inst_req = cls(req_name, req_version, req_source, req_type, req_signature_sources)
if req_type not in {'galaxy', 'subdirs'} and req_name is None:
@@ -440,8 +463,8 @@ class _ComputedReqKindsMixin:
def __unicode__(self):
if self.fqcn is None:
return (
- f'{self.type} collection from a Git repo' if self.is_scm
- else f'{self.type} collection from a namespace'
+ u'"virtual collection Git repo"' if self.is_scm
+ else u'"virtual collection namespace"'
)
return (
@@ -481,14 +504,14 @@ class _ComputedReqKindsMixin:
@property
def namespace(self):
if self.is_virtual:
- raise TypeError(f'{self.type} collections do not have a namespace')
+ raise TypeError('Virtual collections do not have a namespace')
return self._get_separate_ns_n_name()[0]
@property
def name(self):
if self.is_virtual:
- raise TypeError(f'{self.type} collections do not have a name')
+ raise TypeError('Virtual collections do not have a name')
return self._get_separate_ns_n_name()[-1]
@@ -542,6 +565,27 @@ class _ComputedReqKindsMixin:
return not self.is_concrete_artifact
@property
+ def is_pinned(self):
+ """Indicate if the version set is considered pinned.
+
+ This essentially computes whether the version field of the current
+ requirement explicitly requests a specific version and not an allowed
+ version range.
+
+ It is then used to help the resolvelib-based dependency resolver judge
+ whether it's acceptable to consider a pre-release candidate version
+ despite pre-release installs not being requested by the end-user
+ explicitly.
+
+ See https://github.com/ansible/ansible/pull/81606 for extra context.
+ """
+ version_string = self.ver[0]
+ return version_string.isdigit() or not (
+ version_string == '*' or
+ version_string.startswith(('<', '>', '!='))
+ )
+
+ @property
def source_info(self):
return self._source_info
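
A distilled sketch of the first-character heuristic behind `is_pinned` (an illustrative standalone function, not the real property):

```python
def is_pinned(ver):
    # Mirrors the property above: a version is "pinned" when it names a
    # specific release rather than a wildcard or a range.
    first = ver[0]
    return first.isdigit() or not (first == '*' or first.startswith(('<', '>', '!=')))

assert is_pinned('1.2.3-alpha.4')   # explicit (pre-release) version
assert is_pinned('=1.2.3')          # leading '=' still counts as a pin
assert not is_pinned('*')           # any version
assert not is_pinned('>=1.0.0')     # version range
```
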
diff --git a/lib/ansible/galaxy/dependency_resolution/errors.py b/lib/ansible/galaxy/dependency_resolution/errors.py
index ae3b439..acd8857 100644
--- a/lib/ansible/galaxy/dependency_resolution/errors.py
+++ b/lib/ansible/galaxy/dependency_resolution/errors.py
@@ -7,7 +7,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
try:
- from resolvelib.resolvers import (
+ from resolvelib.resolvers import ( # pylint: disable=unused-import
ResolutionImpossible as CollectionDependencyResolutionImpossible,
InconsistentCandidate as CollectionDependencyInconsistentCandidate,
)
diff --git a/lib/ansible/galaxy/dependency_resolution/providers.py b/lib/ansible/galaxy/dependency_resolution/providers.py
index 6ad1de8..f13d3ec 100644
--- a/lib/ansible/galaxy/dependency_resolution/providers.py
+++ b/lib/ansible/galaxy/dependency_resolution/providers.py
@@ -40,7 +40,7 @@ except ImportError:
# TODO: add python requirements to ansible-test's ansible-core distribution info and remove the hardcoded lowerbound/upperbound fallback
RESOLVELIB_LOWERBOUND = SemanticVersion("0.5.3")
-RESOLVELIB_UPPERBOUND = SemanticVersion("0.9.0")
+RESOLVELIB_UPPERBOUND = SemanticVersion("1.1.0")
RESOLVELIB_VERSION = SemanticVersion.from_loose_version(LooseVersion(resolvelib_version))
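
Raising the upper bound declares support for resolvelib's 1.x API (see the updated `find_matches` note below). A sketch of a bounds check under the assumption that the lower bound is inclusive and the upper bound exclusive, as the `>=`/`<` specifier built in `_resolve_depenency_map()` suggests:

```python
from ansible.module_utils.compat.version import LooseVersion
from ansible.utils.version import SemanticVersion

RESOLVELIB_LOWERBOUND = SemanticVersion("0.5.3")
RESOLVELIB_UPPERBOUND = SemanticVersion("1.1.0")

# e.g. the version reported by the installed resolvelib distribution
installed = SemanticVersion.from_loose_version(LooseVersion("1.0.1"))
assert RESOLVELIB_LOWERBOUND <= installed < RESOLVELIB_UPPERBOUND
```
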
@@ -51,7 +51,6 @@ class CollectionDependencyProviderBase(AbstractProvider):
self, # type: CollectionDependencyProviderBase
apis, # type: MultiGalaxyAPIProxy
concrete_artifacts_manager=None, # type: ConcreteArtifactsManager
- user_requirements=None, # type: t.Iterable[Requirement]
preferred_candidates=None, # type: t.Iterable[Candidate]
with_deps=True, # type: bool
with_pre_releases=False, # type: bool
@@ -87,58 +86,12 @@ class CollectionDependencyProviderBase(AbstractProvider):
Requirement.from_requirement_dict,
art_mgr=concrete_artifacts_manager,
)
- self._pinned_candidate_requests = set(
- # NOTE: User-provided signatures are supplemental, so signatures
- # NOTE: are not used to determine if a candidate is user-requested
- Candidate(req.fqcn, req.ver, req.src, req.type, None)
- for req in (user_requirements or ())
- if req.is_concrete_artifact or (
- req.ver != '*' and
- not req.ver.startswith(('<', '>', '!='))
- )
- )
self._preferred_candidates = set(preferred_candidates or ())
self._with_deps = with_deps
self._with_pre_releases = with_pre_releases
self._upgrade = upgrade
self._include_signatures = include_signatures
- def _is_user_requested(self, candidate): # type: (Candidate) -> bool
- """Check if the candidate is requested by the user."""
- if candidate in self._pinned_candidate_requests:
- return True
-
- if candidate.is_online_index_pointer and candidate.src is not None:
- # NOTE: Candidate is a namedtuple, it has a source server set
- # NOTE: to a specific GalaxyAPI instance or `None`. When the
- # NOTE: user runs
- # NOTE:
- # NOTE: $ ansible-galaxy collection install ns.coll
- # NOTE:
- # NOTE: then it's saved in `self._pinned_candidate_requests`
- # NOTE: as `('ns.coll', '*', None, 'galaxy')` but then
- # NOTE: `self.find_matches()` calls `self.is_satisfied_by()`
- # NOTE: with Candidate instances bound to each specific
- # NOTE: server available, those look like
- # NOTE: `('ns.coll', '*', GalaxyAPI(...), 'galaxy')` and
- # NOTE: wouldn't match the user requests saved in
- # NOTE: `self._pinned_candidate_requests`. This is why we
- # NOTE: normalize the collection to have `src=None` and try
- # NOTE: again.
- # NOTE:
- # NOTE: When the user request comes from `requirements.yml`
- # NOTE: with the `source:` set, it'll match the first check
- # NOTE: but it still can have entries with `src=None` so this
- # NOTE: normalized check is still necessary.
- # NOTE:
- # NOTE: User-provided signatures are supplemental, so signatures
- # NOTE: are not used to determine if a candidate is user-requested
- return Candidate(
- candidate.fqcn, candidate.ver, None, candidate.type, None
- ) in self._pinned_candidate_requests
-
- return False
-
def identify(self, requirement_or_candidate):
# type: (t.Union[Candidate, Requirement]) -> str
"""Given requirement or candidate, return an identifier for it.
@@ -190,7 +143,7 @@ class CollectionDependencyProviderBase(AbstractProvider):
Mapping of identifier, list of named tuple pairs.
The named tuples have the entries ``requirement`` and ``parent``.
- resolvelib >=0.8.0, <= 0.8.1
+ resolvelib >=0.8.0, <= 1.0.1
:param identifier: The value returned by ``identify()``.
@@ -342,25 +295,79 @@ class CollectionDependencyProviderBase(AbstractProvider):
latest_matches = []
signatures = []
extra_signature_sources = [] # type: list[str]
+
+ discarding_pre_releases_acceptable = any(
+ not is_pre_release(candidate_version)
+ for candidate_version, _src_server in coll_versions
+ )
+
+ # NOTE: The optimization of conditionally looping over the requirements
+ # NOTE: is used to skip having to compute the pinned status of all
+ # NOTE: requirements and apply version normalization to the found ones.
+ all_pinned_requirement_version_numbers = {
+ # NOTE: Pinned versions can start with a number, but also with an
+ # NOTE: equals sign. Stripping it at the beginning should be
+ # NOTE: enough. If there's a space after equals, the second strip
+ # NOTE: will take care of it.
+ # NOTE: Without this conversion, requirements versions like
+ # NOTE: '1.2.3-alpha.4' work, but '=1.2.3-alpha.4' don't.
+ requirement.ver.lstrip('=').strip()
+ for requirement in requirements
+ if requirement.is_pinned
+ } if discarding_pre_releases_acceptable else set()
+
for version, src_server in coll_versions:
tmp_candidate = Candidate(fqcn, version, src_server, 'galaxy', None)
- unsatisfied = False
for requirement in requirements:
- unsatisfied |= not self.is_satisfied_by(requirement, tmp_candidate)
+ candidate_satisfies_requirement = self.is_satisfied_by(
+ requirement, tmp_candidate,
+ )
+ if not candidate_satisfies_requirement:
+ break
+
+ should_disregard_pre_release_candidate = (
+ # NOTE: Do not discard pre-release candidates in the
+ # NOTE: following cases:
+ # NOTE: * the end-user requested pre-releases explicitly;
+ # NOTE: * the candidate is a concrete artifact (e.g. a
+ # NOTE: Git repository, subdirs, a tarball URL, or a
+ # NOTE: local dir or file etc.);
+ # NOTE: * the candidate's pre-release version exactly
+ # NOTE: matches a version specifically requested by one
+ # NOTE: of the requirements in the current match
+ # NOTE: discovery round (i.e. matching a requirement
+ # NOTE: that is not a range but an explicit specific
+ # NOTE: version pin). This works when some requirements
+ # NOTE: request version ranges but others (possibly on
+ # NOTE: different dependency tree level depths) demand
+ # NOTE: pre-release dependency versions, even if those
+ # NOTE: dependencies are transitive.
+ is_pre_release(tmp_candidate.ver)
+ and discarding_pre_releases_acceptable
+ and not (
+ self._with_pre_releases
+ or tmp_candidate.is_concrete_artifact
+ or version in all_pinned_requirement_version_numbers
+ )
+ )
+ if should_disregard_pre_release_candidate:
+ break
+
# FIXME
- # unsatisfied |= not self.is_satisfied_by(requirement, tmp_candidate) or not (
- # requirement.src is None or # if this is true for some candidates but not all it will break key param - Nonetype can't be compared to str
+ # candidate_is_from_requested_source = (
+ # requirement.src is None # if this is true for some candidates but not all it will break key param - Nonetype can't be compared to str
# or requirement.src == candidate.src
# )
- if unsatisfied:
- break
+ # if not candidate_is_from_requested_source:
+ # break
+
if not self._include_signatures:
continue
extra_signature_sources.extend(requirement.signature_sources or [])
- if not unsatisfied:
+ else: # candidate satisfies requirements, `break` never happened
if self._include_signatures:
for extra_source in extra_signature_sources:
signatures.append(get_signature_from_source(extra_source))
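
With `_is_user_requested()` gone, pre-release filtering now happens during match discovery. Distilled, the gating above amounts to the following (names are illustrative, not the provider's API):

```python
def keep(version, *, pre_release, with_pre_releases, concrete_artifact,
         pinned_versions, discarding_pre_releases_acceptable):
    # A pre-release candidate is discarded only when stable alternatives
    # exist AND nothing specifically asked for it.
    if not pre_release or not discarding_pre_releases_acceptable:
        return True
    return with_pre_releases or concrete_artifact or version in pinned_versions

assert keep('2.0.0-rc.1', pre_release=True, with_pre_releases=False,
            concrete_artifact=False, pinned_versions={'2.0.0-rc.1'},
            discarding_pre_releases_acceptable=True)
assert not keep('2.0.0-rc.1', pre_release=True, with_pre_releases=False,
                concrete_artifact=False, pinned_versions=set(),
                discarding_pre_releases_acceptable=True)
```
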
@@ -405,21 +412,6 @@ class CollectionDependencyProviderBase(AbstractProvider):
:returns: Indication whether the `candidate` is a viable \
solution to the `requirement`.
"""
- # NOTE: Only allow pre-release candidates if we want pre-releases
- # NOTE: or the req ver was an exact match with the pre-release
- # NOTE: version. Another case where we'd want to allow
- # NOTE: pre-releases is when there are several user requirements
- # NOTE: and one of them is a pre-release that also matches a
- # NOTE: transitive dependency of another requirement.
- allow_pre_release = self._with_pre_releases or not (
- requirement.ver == '*' or
- requirement.ver.startswith('<') or
- requirement.ver.startswith('>') or
- requirement.ver.startswith('!=')
- ) or self._is_user_requested(candidate)
- if is_pre_release(candidate.ver) and not allow_pre_release:
- return False
-
# NOTE: This is a set of Pipenv-inspired optimizations. Ref:
# https://github.com/sarugaku/passa/blob/2ac00f1/src/passa/models/providers.py#L58-L74
if (
diff --git a/lib/ansible/galaxy/role.py b/lib/ansible/galaxy/role.py
index 9eb6e7b..e7c5e01 100644
--- a/lib/ansible/galaxy/role.py
+++ b/lib/ansible/galaxy/role.py
@@ -36,12 +36,13 @@ from ansible import context
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.galaxy.api import GalaxyAPI
from ansible.galaxy.user_agent import user_agent
-from ansible.module_utils._text import to_native, to_text
+from ansible.module_utils.common.text.converters import to_native, to_text
from ansible.module_utils.common.yaml import yaml_dump, yaml_load
from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils.urls import open_url
from ansible.playbook.role.requirement import RoleRequirement
from ansible.utils.display import Display
+from ansible.utils.path import is_subpath, unfrackpath
display = Display()
@@ -211,7 +212,7 @@ class GalaxyRole(object):
info = dict(
version=self.version,
- install_date=datetime.datetime.utcnow().strftime("%c"),
+ install_date=datetime.datetime.now(datetime.timezone.utc).strftime("%c"),
)
if not os.path.exists(os.path.join(self.path, 'meta')):
os.makedirs(os.path.join(self.path, 'meta'))
@@ -393,43 +394,41 @@ class GalaxyRole(object):
# we only extract files, and remove any relative path
# bits that might be in the file for security purposes
# and drop any containing directory, as mentioned above
- if member.isreg() or member.issym():
- for attr in ('name', 'linkname'):
- attr_value = getattr(member, attr, None)
- if not attr_value:
- continue
- n_attr_value = to_native(attr_value)
- n_archive_parent_dir = to_native(archive_parent_dir)
- n_parts = n_attr_value.replace(n_archive_parent_dir, "", 1).split(os.sep)
- n_final_parts = []
- for n_part in n_parts:
- # TODO if the condition triggers it produces a broken installation.
- # It will create the parent directory as an empty file and will
- # explode if the directory contains valid files.
- # Leaving this as is since the whole module needs a rewrite.
- #
- # Check if we have any files with illegal names,
- # and display a warning if so. This could help users
- # to debug a broken installation.
- if not n_part:
- continue
- if n_part == '..':
- display.warning(f"Illegal filename '{n_part}': '..' is not allowed")
- continue
- if n_part.startswith('~'):
- display.warning(f"Illegal filename '{n_part}': names cannot start with '~'")
- continue
- if '$' in n_part:
- display.warning(f"Illegal filename '{n_part}': names cannot contain '$'")
- continue
- n_final_parts.append(n_part)
- setattr(member, attr, os.path.join(*n_final_parts))
-
- if _check_working_data_filter():
- # deprecated: description='extract fallback without filter' python_version='3.11'
- role_tar_file.extract(member, to_native(self.path), filter='data') # type: ignore[call-arg]
+ if not (member.isreg() or member.issym()):
+ continue
+
+ for attr in ('name', 'linkname'):
+ if not (attr_value := getattr(member, attr, None)):
+ continue
+
+ if attr_value.startswith(os.sep) and not is_subpath(attr_value, archive_parent_dir):
+ err = f"Invalid {attr} for tarfile member: path {attr_value} is not a subpath of the role {archive_parent_dir}"
+ raise AnsibleError(err)
+
+ if attr == 'linkname':
+ # Symlinks are relative to the link
+ relative_to_archive_dir = os.path.dirname(getattr(member, 'name', ''))
+ archive_dir_path = os.path.join(archive_parent_dir, relative_to_archive_dir, attr_value)
else:
- role_tar_file.extract(member, to_native(self.path))
+ # Normalize paths that start with the archive dir
+ attr_value = attr_value.replace(archive_parent_dir, "", 1)
+ attr_value = os.path.join(*attr_value.split(os.sep)) # remove leading os.sep
+ archive_dir_path = os.path.join(archive_parent_dir, attr_value)
+
+ resolved_archive = unfrackpath(archive_parent_dir)
+ resolved_path = unfrackpath(archive_dir_path)
+ if not is_subpath(resolved_path, resolved_archive):
+ err = f"Invalid {attr} for tarfile member: path {resolved_path} is not a subpath of the role {resolved_archive}"
+ raise AnsibleError(err)
+
+ relative_path = os.path.join(*resolved_path.replace(resolved_archive, "", 1).split(os.sep)) or '.'
+ setattr(member, attr, relative_path)
+
+ if _check_working_data_filter():
+ # deprecated: description='extract fallback without filter' python_version='3.11'
+ role_tar_file.extract(member, to_native(self.path), filter='data') # type: ignore[call-arg]
+ else:
+ role_tar_file.extract(member, to_native(self.path))
# write out the install info file for later use
self._write_galaxy_install_info()
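
The rewritten extraction loop normalizes each member path and rejects anything that escapes the role directory (symlink targets are first resolved relative to the link's own directory). A hedged sketch of the core traversal check, using the `ansible.utils.path` helpers imported above; the member name is a hostile example:

```python
import os
from ansible.utils.path import is_subpath, unfrackpath

archive_parent_dir = 'myrole-1.0.0'            # hypothetical top-level dir
attr_value = 'myrole-1.0.0/../../etc/passwd'   # hostile member name

attr_value = attr_value.replace(archive_parent_dir, '', 1)
attr_value = os.path.join(*attr_value.split(os.sep))  # drop leading os.sep
resolved_archive = unfrackpath(archive_parent_dir)
resolved_path = unfrackpath(os.path.join(archive_parent_dir, attr_value))
if not is_subpath(resolved_path, resolved_archive):
    raise ValueError(f"path {resolved_path} is not a subpath of the role {resolved_archive}")
```
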
diff --git a/lib/ansible/galaxy/token.py b/lib/ansible/galaxy/token.py
index 4455fd0..313d007 100644
--- a/lib/ansible/galaxy/token.py
+++ b/lib/ansible/galaxy/token.py
@@ -28,7 +28,7 @@ from stat import S_IRUSR, S_IWUSR
from ansible import constants as C
from ansible.galaxy.user_agent import user_agent
-from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.yaml import yaml_dump, yaml_load
from ansible.module_utils.urls import open_url
from ansible.utils.display import Display
@@ -69,7 +69,7 @@ class KeycloakToken(object):
# - build a request to POST to auth_url
# - body is form encoded
- # - 'request_token' is the offline token stored in ansible.cfg
+ # - 'refresh_token' is the offline token stored in ansible.cfg
# - 'grant_type' is 'refresh_token'
# - 'client_id' is 'cloud-services'
# - should probably be based on the contents of the
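
For context, the corrected comment describes Keycloak's standard offline-token exchange. A hedged sketch of the request body it implies (field values per the comment; the endpoint and token are placeholders):

```python
from urllib.parse import urlencode

payload = urlencode({
    'refresh_token': '<offline token from ansible.cfg>',
    'grant_type': 'refresh_token',
    'client_id': 'cloud-services',
})
# open_url(auth_url, data=payload, method='POST', ...) would POST this
# form-encoded body and receive back a short-lived access token.
```
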