summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-04 00:24:37 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-04 00:24:37 +0000
commit1022b2cebe73db426241c2f420d4ee9f6f3c1bed (patch)
treea5c38ccfaa66e8a52767dec01d3598b67a7422a8
parentInitial commit. (diff)
downloadpython-ansible-compat-1022b2cebe73db426241c2f420d4ee9f6f3c1bed.tar.xz
python-ansible-compat-1022b2cebe73db426241c2f420d4ee9f6f3c1bed.zip
Adding upstream version 4.1.11.upstream/4.1.11
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to '')
-rw-r--r--.git_archival.txt4
-rw-r--r--.gitattributes7
-rw-r--r--.github/CODEOWNERS1
-rw-r--r--.github/CODE_OF_CONDUCT.md4
-rw-r--r--.github/dependabot.yml23
-rw-r--r--.github/release-drafter.yml3
-rw-r--r--.github/workflows/ack.yml9
-rw-r--r--.github/workflows/push.yml12
-rw-r--r--.github/workflows/release.yml39
-rw-r--r--.github/workflows/tox.yml110
-rw-r--r--.gitignore136
-rw-r--r--.packit.yaml42
-rw-r--r--.pre-commit-config.yaml131
-rw-r--r--.prettierignore4
-rw-r--r--.prettierrc.yaml18
-rw-r--r--.readthedocs.yml45
-rw-r--r--.vscode/extensions.json19
-rw-r--r--.vscode/settings.json42
-rw-r--r--.yamllint8
-rw-r--r--LICENSE21
-rw-r--r--README.md12
-rw-r--r--ansible.cfg3
-rw-r--r--codecov.yml6
-rw-r--r--docs/api.md13
-rw-r--r--docs/images/favicon.icobin0 -> 15406 bytes
-rw-r--r--docs/images/logo.pngbin0 -> 10993 bytes
-rw-r--r--docs/images/logo.svg7
-rw-r--r--docs/index.md16
-rw-r--r--examples/reqs_broken/requirements.yml4
-rw-r--r--examples/reqs_v1/requirements.yml4
-rw-r--r--examples/reqs_v2/community-molecule-0.1.0.tar.gzbin0 -> 7615 bytes
-rw-r--r--examples/reqs_v2/requirements.yml21
-rw-r--r--mkdocs.yml90
-rw-r--r--pyproject.toml165
-rw-r--r--readthedocs.yml20
-rw-r--r--requirements.txt334
-rw-r--r--src/ansible_compat/__init__.py9
-rw-r--r--src/ansible_compat/config.py465
-rw-r--r--src/ansible_compat/constants.py42
-rw-r--r--src/ansible_compat/errors.py57
-rw-r--r--src/ansible_compat/loaders.py30
-rw-r--r--src/ansible_compat/ports.py4
-rw-r--r--src/ansible_compat/prerun.py21
-rw-r--r--src/ansible_compat/py.typed0
-rw-r--r--src/ansible_compat/runtime.py961
-rw-r--r--src/ansible_compat/schema.py110
-rw-r--r--src/ansible_compat/types.py23
-rw-r--r--test/__init__.py1
-rw-r--r--test/assets/galaxy_paths/.bar/galaxy.yml0
-rw-r--r--test/assets/galaxy_paths/foo/galaxy.yml0
-rw-r--r--test/assets/requirements-invalid-collection.yml3
-rw-r--r--test/assets/requirements-invalid-role.yml3
-rw-r--r--test/assets/validate0_data.json1
-rw-r--r--test/assets/validate0_expected.json22
-rw-r--r--test/assets/validate0_schema.json9
-rw-r--r--test/collections/acme.broken/galaxy.yml1
-rw-r--r--test/collections/acme.goodies/galaxy.yml34
-rw-r--r--test/collections/acme.goodies/molecule/default/converge.yml7
-rw-r--r--test/collections/acme.goodies/molecule/default/molecule.yml11
-rw-r--r--test/collections/acme.goodies/roles/baz/molecule/deep_scenario/converge.yml7
-rw-r--r--test/collections/acme.goodies/roles/baz/molecule/deep_scenario/molecule.yml11
-rw-r--r--test/collections/acme.goodies/roles/baz/tasks/main.yml3
-rw-r--r--test/collections/acme.goodies/tests/requirements.yml3
-rw-r--r--test/collections/acme.minimal/galaxy.yml30
-rw-r--r--test/conftest.py127
-rw-r--r--test/roles/acme.missing_deps/meta/main.yml8
-rw-r--r--test/roles/acme.missing_deps/requirements.yml2
-rw-r--r--test/roles/acme.sample2/meta/main.yml16
-rw-r--r--test/roles/ansible-role-sample/meta/main.yml16
-rw-r--r--test/roles/sample3/meta/main.yml16
-rw-r--r--test/roles/sample4/meta/main.yml16
-rw-r--r--test/test_api.py5
-rw-r--r--test/test_config.py86
-rw-r--r--test/test_configuration_example.py12
-rw-r--r--test/test_loaders.py9
-rw-r--r--test/test_prerun.py11
-rw-r--r--test/test_runtime.py893
-rw-r--r--test/test_runtime_example.py24
-rw-r--r--test/test_runtime_scan_path.py102
-rw-r--r--test/test_schema.py73
-rwxr-xr-xtools/get-version.sh7
-rwxr-xr-xtools/update-version.sh7
-rw-r--r--tox.ini191
83 files changed, 4862 insertions, 0 deletions
diff --git a/.git_archival.txt b/.git_archival.txt
new file mode 100644
index 0000000..242fcd6
--- /dev/null
+++ b/.git_archival.txt
@@ -0,0 +1,4 @@
+node: e92db9f0811206312edf621372de35ea9cff719f
+node-date: 2024-01-10T12:47:29+00:00
+describe-name: v4.1.11
+ref-names: HEAD -> main, tag: v4.1.11
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..2e46433
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,7 @@
+# Force LF line endings for text files
+* text=auto eol=lf
+
+*.png binary
+
+# Needed for setuptools-scm-git-archive
+.git_archival.txt export-subst
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000..d1f5d6b
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1 @@
+* @ansible/devtools
diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..bf829d8
--- /dev/null
+++ b/.github/CODE_OF_CONDUCT.md
@@ -0,0 +1,4 @@
+# Community Code of Conduct
+
+Please see the official
+[Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..45874a7
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,23 @@
+---
+# Until bug below is sorted we will not allow dependabot to run by itself
+# https://github.com/dependabot/dependabot-core/issues/369
+version: 2
+updates:
+ - package-ecosystem: pip
+ directory: /
+ schedule:
+ day: sunday
+ interval: weekly
+ labels:
+ - dependabot-deps-updates
+ - skip-changelog
+ versioning-strategy: lockfile-only
+ open-pull-requests-limit: 0 # neutered
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: weekly
+ open-pull-requests-limit: 3
+ labels:
+ - "dependencies"
+ - "skip-changelog"
diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml
new file mode 100644
index 0000000..b2c18a9
--- /dev/null
+++ b/.github/release-drafter.yml
@@ -0,0 +1,3 @@
+---
+# see https://github.com/ansible/team-devtools
+_extends: ansible/team-devtools
diff --git a/.github/workflows/ack.yml b/.github/workflows/ack.yml
new file mode 100644
index 0000000..bdf02b5
--- /dev/null
+++ b/.github/workflows/ack.yml
@@ -0,0 +1,9 @@
+# See https://github.com/ansible/devtools/blob/main/.github/workflows/ack.yml
+name: ack
+on:
+ pull_request_target:
+ types: [opened, labeled, unlabeled, synchronize]
+
+jobs:
+ ack:
+ uses: ansible/devtools/.github/workflows/ack.yml@main
diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
new file mode 100644
index 0000000..c0c8e95
--- /dev/null
+++ b/.github/workflows/push.yml
@@ -0,0 +1,12 @@
+# See https://github.com/ansible/devtools/blob/main/.github/workflows/push.yml
+name: push
+on:
+ push:
+ branches:
+ - main
+ - "releases/**"
+ - "stable/**"
+
+jobs:
+ ack:
+ uses: ansible/devtools/.github/workflows/push.yml@main
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..b05de88
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,39 @@
+name: release
+
+on:
+ release:
+ types: [published]
+
+jobs:
+ before-release:
+ uses: ansible/ansible-compat/.github/workflows/tox.yml@main
+
+ release:
+ name: release ${{ github.event.ref }}
+ needs: before-release
+ # unable to use environment with uses/with, basically cannot reuse release pipelines
+ environment: release
+ runs-on: ubuntu-22.04
+ permissions:
+ id-token: write
+
+ env:
+ FORCE_COLOR: 1
+ PY_COLORS: 1
+ TOX_PARALLEL_NO_SPINNER: 1
+
+ steps:
+ - name: Switch to using Python 3.12 by default
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+ - name: Install tox
+ run: python3 -m pip install --user "tox>=4.0.0"
+ - name: Check out src from Git
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0 # needed by setuptools-scm
+ - name: Build dists
+ run: python -m tox -e pkg
+ - name: Publish to pypi.org
+ uses: pypa/gh-action-pypi-publish@unstable/v1
diff --git a/.github/workflows/tox.yml b/.github/workflows/tox.yml
new file mode 100644
index 0000000..f56f266
--- /dev/null
+++ b/.github/workflows/tox.yml
@@ -0,0 +1,110 @@
+name: tox
+on:
+ create: # is used for publishing to PyPI and TestPyPI
+ tags: # any tag regardless of its name, no branches
+ - "**"
+ push: # only publishes pushes to the main branch to TestPyPI
+ branches: # any integration branch but not tag
+ - "main"
+ pull_request:
+ schedule:
+ - cron: 1 0 * * * # Run daily at 0:01 UTC
+ workflow_call:
+
+jobs:
+ pre:
+ name: pre
+ runs-on: ubuntu-22.04
+ outputs:
+ matrix: ${{ steps.generate_matrix.outputs.matrix }}
+ steps:
+ - name: Determine matrix
+ id: generate_matrix
+ uses: coactions/dynamic-matrix@v1
+ with:
+ min_python: "3.9"
+ max_python: "3.12"
+ default_python: "3.10"
+ other_names: |
+ lint
+ docs
+ pkg
+ py39-ansible212
+ py39-ansible213
+ py39-ansible214
+ py39-ansible215
+ py310-ansible215
+ py311-ansible215
+ py312-ansible216
+ py312-devel
+ smoke
+ platforms: linux,macos
+ macos: minmax
+ build:
+ name: ${{ matrix.name }}
+ runs-on: ${{ matrix.os || 'ubuntu-22.04' }}
+ needs: pre
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.pre.outputs.matrix) }}
+ env:
+ FORCE_COLOR: 1
+
+ steps:
+ - name: Check out src from Git
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0 # needed by setuptools-scm
+
+ - name: Set up Python ${{ matrix.python_version }}
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ matrix.python_version }}
+
+ - name: Pre-commit cache
+ uses: actions/cache@v3
+ with:
+ path: ~/.cache/pre-commit
+ key: ${{ matrix.name }}-pre-commit-${{ hashFiles('setup.cfg', 'tox.ini', 'pyproject.toml', '.pre-commit-config.yaml') }}
+
+ - name: Pip cache
+ uses: actions/cache@v3
+ with:
+ path: ~/.cache/pip
+ key: ${{ matrix.name }}-pip-${{ hashFiles('setup.cfg', 'tox.ini', 'pyproject.toml', '.pre-commit-config.yaml') }}
+
+ - name: Install tox
+ run: python3 -m pip install --upgrade 'tox>=4.0.3'
+
+ - name: Initialize tox envs
+ run: python -m tox --notest --skip-missing-interpreters false -vv -e ${{ matrix.passed_name }}
+
+ - name: Test with tox
+ run: python -m tox -e ${{ matrix.passed_name }}
+
+ - name: Archive logs
+ uses: actions/upload-artifact@v3
+ with:
+ name: logs.zip
+ path: .tox/**/log/
+
+ - name: Upload coverage data
+ if: ${{ startsWith(matrix.passed_name, 'py') }}
+ uses: codecov/codecov-action@v3
+ with:
+ name: ${{ matrix.passed_name }}
+ fail_ci_if_error: false # see https://github.com/codecov/codecov-action/issues/598
+ token: ${{ secrets.CODECOV_TOKEN }}
+ verbose: true # optional (default = false)
+
+ check: # This job does nothing and is only used for the branch protection
+ if: always()
+
+ needs:
+ - build
+ runs-on: ubuntu-22.04
+ steps:
+ - name: Decide whether the needed jobs succeeded or failed
+ uses: re-actors/alls-green@release/v1
+ with:
+ jobs: ${{ toJSON(needs) }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..a20c99c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,136 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+rpm/*.spec
+*.rpm
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+.test-results
+*.lcov
+ansible_collections
+
+# Generated by setuptools-scm
+src/ansible_compat/_version.py
diff --git a/.packit.yaml b/.packit.yaml
new file mode 100644
index 0000000..575e351
--- /dev/null
+++ b/.packit.yaml
@@ -0,0 +1,42 @@
+---
+# https://packit.dev/docs/configuration/
+# Test locally running: packit build locally
+# spell-checker:ignore packit specfile copr epel
+specfile_path: dist/python-ansible-compat.spec
+actions:
+ create-archive:
+ # packit.dev service does have these module pre-installed:
+ - python3 -m build --sdist --outdir dist
+ - sh -c "ls dist/ansible-compat-*.tar.gz"
+ get-current-version:
+ - ./tools/get-version.sh
+ post-upstream-clone:
+ - rm -f dist/*.tar.gz || true
+ - ./tools/update-version.sh
+srpm_build_deps:
+ - python3-build
+ - python3-setuptools_scm
+ - python3-pytest
+ - python3-pytest-mock
+jobs:
+ - job: copr_build
+ trigger: commit
+ branch: main
+ targets:
+ - fedora-rawhide-x86_64
+ - fedora-rawhide-aarch64
+ - fedora-latest-x86_64
+ - fedora-latest-aarch64
+ # Missing python3-build see https://bugzilla.redhat.com/show_bug.cgi?id=2129071
+ # - centos-stream-9-aarch64
+ # - centos-stream-9-x86_64
+ - job: tests
+ trigger: pull_request
+ branch: main
+ targets:
+ - fedora-latest
+ - fedora-rawhide
+ # - job: propose_downstream
+ # trigger: release
+ # metadata:
+ # dist-git-branch: master
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..a0749a0
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,131 @@
+---
+ci:
+ # format compatible with commitlint
+ autoupdate_commit_msg: "chore: pre-commit autoupdate"
+ autoupdate_schedule: monthly
+ autofix_commit_msg: |
+ chore: auto fixes from pre-commit.com hooks
+
+ for more information, see https://pre-commit.ci
+ skip:
+ # https://github.com/pre-commit-ci/issues/issues/55
+ - ccv
+ - pip-compile
+ # No docker on pre-commit.ci
+ - validate-config-in-container
+default_language_version:
+ # Needed in order to make pip-compile output predictable.
+ python: python3.10
+exclude: |
+ (?x)^(
+ test/assets/.*
+ )$
+repos:
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: "v0.1.9"
+ hooks:
+ - id: ruff
+ args: [--fix, --exit-non-zero-on-fix]
+ - repo: https://github.com/pappasam/toml-sort
+ rev: v0.23.1
+ hooks:
+ - id: toml-sort-fix
+ - repo: https://github.com/pre-commit/mirrors-prettier
+ # keep it before yamllint
+ rev: "v3.0.3"
+ hooks:
+ - id: prettier
+ additional_dependencies:
+ - prettier
+ - prettier-plugin-toml
+ - prettier-plugin-sort-json
+ - repo: https://github.com/pre-commit/pre-commit-hooks.git
+ rev: v4.5.0
+ hooks:
+ - id: end-of-file-fixer
+ - id: trailing-whitespace
+ exclude: >
+ (?x)^(
+ examples/playbooks/(with-skip-tag-id|unicode).yml|
+ examples/playbooks/example.yml
+ )$
+ - id: mixed-line-ending
+ - id: check-byte-order-marker
+ - id: check-executables-have-shebangs
+ - id: check-merge-conflict
+ - id: debug-statements
+ language_version: python3
+ - repo: https://github.com/codespell-project/codespell
+ rev: v2.2.6
+ hooks:
+ - id: codespell
+ - repo: https://github.com/adrienverge/yamllint.git
+ rev: v1.33.0
+ hooks:
+ - id: yamllint
+ files: \.(yaml|yml)$
+ types: [file, yaml]
+ entry: yamllint --strict
+ - repo: https://github.com/psf/black
+ rev: 23.12.1
+ hooks:
+ - id: black
+ language_version: python3
+ - repo: https://github.com/pre-commit/mirrors-mypy
+ rev: v1.8.0
+ hooks:
+ - id: mypy
+ # empty args needed in order to match mypy cli behavior
+ args: ["--strict"]
+ additional_dependencies:
+ - ansible-core
+ - cached_property
+ - packaging
+ - pytest
+ - pytest-mock
+ - subprocess-tee>=0.4.1
+ - "typing-extensions>=4.5.0;python_version<'3.10'"
+ - types-PyYAML
+ - types-pkg_resources
+ - types-jsonschema>=4.4.9
+ - repo: https://github.com/pycqa/pylint
+ rev: v3.0.3
+ hooks:
+ - id: pylint
+ additional_dependencies:
+ - PyYAML
+ - pytest
+ - typing_extensions
+ # Keep last due to being considerably slower than the others:
+ - repo: local
+ hooks:
+ - id: pip-compile-upgrade
+ # To run it execute: `pre-commit run pip-compile-upgrade --hook-stage manual`
+ name: Upgrade constraints files and requirements
+ files: ^(pyproject\.toml|requirements\.txt)$
+ language: python
+ entry: python -m piptools compile --resolver=backtracking --upgrade -q --strip-extras --extra docs --extra test --output-file=requirements.txt pyproject.toml --unsafe-package ansible-core --unsafe-package resolvelib --unsafe-package typing_extensions
+ pass_filenames: false
+ stages:
+ - manual
+ additional_dependencies:
+ - pip-tools>=6.11.0
+ - id: pip-compile
+ name: Check constraints files and requirements
+ files: ^(pyproject\.toml|requirements\.txt)$
+ language: python
+ entry: python -m piptools compile --resolver=backtracking -q --strip-extras --extra docs --extra test --output-file=requirements.txt pyproject.toml --unsafe-package ansible-core --unsafe-package resolvelib --unsafe-package typing_extensions
+ pass_filenames: false
+ additional_dependencies:
+ - pip-tools>=6.11.0
+ - repo: https://github.com/packit/pre-commit-hooks
+ rev: v1.2.0
+ hooks:
+ - id: validate-config-in-container
+ name: packit
+ alias: packit
+ - repo: https://github.com/mashi/codecov-validator
+ rev: "1.0.1"
+ hooks:
+ - id: ccv
+ name: codecov
diff --git a/.prettierignore b/.prettierignore
new file mode 100644
index 0000000..1ea88f5
--- /dev/null
+++ b/.prettierignore
@@ -0,0 +1,4 @@
+test/assets/
+
+# Generated by setuptools-scm
+src/ansible_compat/_version.py
diff --git a/.prettierrc.yaml b/.prettierrc.yaml
new file mode 100644
index 0000000..906d3d6
--- /dev/null
+++ b/.prettierrc.yaml
@@ -0,0 +1,18 @@
+---
+proseWrap: always
+jsonRecursiveSort: true # prettier-plugin-sort-json
+tabWidth: 2
+useTabs: false
+overrides:
+ - files:
+ - "*.md"
+ options:
+ # compatibility with markdownlint
+ proseWrap: always
+ printWidth: 80
+ - files:
+ - "*.yaml"
+ - "*.yml"
+ options:
+ # compatibility with yamllint
+ proseWrap: preserve
diff --git a/.readthedocs.yml b/.readthedocs.yml
new file mode 100644
index 0000000..a36c386
--- /dev/null
+++ b/.readthedocs.yml
@@ -0,0 +1,45 @@
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html
+# for details
+
+---
+# Required
+version: 2
+
+# Build documentation in the docs/ directory with Sphinx
+sphinx:
+ # keep dirhtml for nice URLs without .html extension
+ builder: dirhtml
+ configuration: docs/conf.py
+ fail_on_warning: true
+
+# Build documentation with MkDocs
+#mkdocs:
+# configuration: mkdocs.yml
+# fail_on_warning: true
+
+# Optionally build your docs in additional formats
+# such as PDF and ePub
+formats: []
+
+submodules:
+ include: all # []
+ exclude: []
+ recursive: true
+
+build:
+ image: latest
+
+# Optionally set the version of Python and requirements required
+# to build docs
+python:
+ version: "3.9"
+ install:
+ # On https://readthedocs.org/dashboard/ansible-lint/environmentvariables/ we
+ # do have PIP_CONSTRAINTS=requirements.txt which ensures we install only
+    # pinned requirements that we know to be working.
+ - method: pip
+ path: .
+ extra_requirements:
+ - docs
+ system_packages: false
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
new file mode 100644
index 0000000..aa1e537
--- /dev/null
+++ b/.vscode/extensions.json
@@ -0,0 +1,19 @@
+{
+ "recommendations": [
+ "Tyriar.sort-lines",
+ "charliermarsh.ruff",
+ "esbenp.prettier-vscode",
+ "hbenl.vscode-test-explorer",
+ "ms-python.isort",
+ "ms-python.python",
+ "ms-python.vscode-pylance",
+ "ms-vscode.live-server",
+ "redhat.ansible",
+ "redhat.vscode-yaml",
+ "ryanluker.vscode-coverage-gutters",
+ "shardulm94.trailing-spaces",
+ "tamasfe.even-better-toml",
+ "timonwong.shellcheck",
+ "znck.grammarly"
+ ]
+}
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..990033d
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,42 @@
+{
+ "[markdown]": {
+ "editor.defaultFormatter": "esbenp.prettier-vscode"
+ },
+ "[python]": {
+ "editor.codeActionsOnSave": {
+ "source.fixAll": "explicit",
+ "source.fixAll.ruff": "never",
+ "source.organizeImports": "never"
+ }
+ },
+ "editor.formatOnSave": true,
+ "evenBetterToml.formatter.alignComments": false,
+ "evenBetterToml.formatter.allowedBlankLines": 2,
+ "files.exclude": {
+ "*.egg-info": true,
+ ".pytest_cache": true,
+ ".tox": true,
+ "__pycache__": true,
+ "build": true
+ },
+ "git.ignoreLimitWarning": true,
+ "grammarly.domain": "technical",
+ "grammarly.files.include": ["**/*.txt", "**/*.md"],
+ "grammarly.hideUnavailablePremiumAlerts": true,
+ "grammarly.showExamples": true,
+ "python.analysis.exclude": ["build"],
+ "python.formatting.provider": "black",
+ "python.linting.flake8Args": ["--ignore=E501,W503"],
+ "python.linting.flake8Enabled": false,
+ "python.linting.mypyCategorySeverity.error": "Warning",
+ "python.linting.mypyEnabled": true,
+ "python.linting.pylintEnabled": true,
+ "python.terminal.activateEnvironment": true,
+ "python.testing.pytestEnabled": true,
+ "python.testing.unittestEnabled": false,
+ "sortLines.filterBlankLines": true,
+ "yaml.completion": true,
+ "yaml.customTags": ["!encrypted/pkcs1-oaep scalar", "!vault scalar"],
+ "yaml.format.enable": false,
+ "yaml.validate": true
+}
diff --git a/.yamllint b/.yamllint
new file mode 100644
index 0000000..f4a0473
--- /dev/null
+++ b/.yamllint
@@ -0,0 +1,8 @@
+rules:
+ document-start: disable
+ indentation:
+ level: error
+ indent-sequences: consistent
+ignore: |
+ .tox
+# ignore added because this file includes on-purpose errors
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..0c5bad7
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2021 Community managed Ansible repositories
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..32b44b2
--- /dev/null
+++ b/README.md
@@ -0,0 +1,12 @@
+# ansible-compat
+
+[![pypi](https://img.shields.io/pypi/v/ansible-compat.svg)](https://pypi.org/project/ansible-compat/)
+[![docs](https://readthedocs.org/projects/ansible-compat/badge/?version=latest)](https://ansible-compat.readthedocs.io/)
+[![gh](https://github.com/ansible/ansible-compat/actions/workflows/tox.yml/badge.svg)](https://github.com/ansible/ansible-compat/actions/workflows/tox.yml)
+[![codecov.io](https://codecov.io/github/ansible/ansible-compat/coverage.svg?branch=main)](https://codecov.io/github/ansible/ansible-compat?branch=main)
+
+A python package containing functions that facilitate
+versions of Ansible 2.12 and newer.
+
+Documentation is available at
+[ansible-compat.readthedocs.io](https://ansible-compat.readthedocs.io/).
diff --git a/ansible.cfg b/ansible.cfg
new file mode 100644
index 0000000..c92c9ce
--- /dev/null
+++ b/ansible.cfg
@@ -0,0 +1,3 @@
+[defaults]
+# isolate testing of ansible-compat from user local setup
+collections_path = .
diff --git a/codecov.yml b/codecov.yml
new file mode 100644
index 0000000..0ba9516
--- /dev/null
+++ b/codecov.yml
@@ -0,0 +1,6 @@
+codecov:
+ require_ci_to_pass: true
+comment: false
+coverage:
+ status:
+ patch: true # we want github annotations
diff --git a/docs/api.md b/docs/api.md
new file mode 100644
index 0000000..6e68272
--- /dev/null
+++ b/docs/api.md
@@ -0,0 +1,13 @@
+# API
+
+::: ansible_compat.config
+
+::: ansible_compat.errors
+
+::: ansible_compat.loaders
+
+::: ansible_compat.prerun
+
+::: ansible_compat.runtime
+
+::: ansible_compat.schema
diff --git a/docs/images/favicon.ico b/docs/images/favicon.ico
new file mode 100644
index 0000000..ea4ebc1
--- /dev/null
+++ b/docs/images/favicon.ico
Binary files differ
diff --git a/docs/images/logo.png b/docs/images/logo.png
new file mode 100644
index 0000000..f3626b0
--- /dev/null
+++ b/docs/images/logo.png
Binary files differ
diff --git a/docs/images/logo.svg b/docs/images/logo.svg
new file mode 100644
index 0000000..ffe210b
--- /dev/null
+++ b/docs/images/logo.svg
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg width="100%" height="100%" viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;">
+ <g id="ansible-aap">
+ <path d="M128,0C198.645,0 256,57.355 256,128C256,198.645 198.645,256 128,256C57.355,256 0,198.645 0,128C0,57.355 57.355,0 128,0ZM189.094,178.686L138.002,55.958C136.786,52.714 133.677,50.416 130.163,50.551C126.513,50.416 123.269,52.714 122.053,55.958L66.23,190.446L85.423,190.446L107.455,135.029L173.55,188.418C175.442,190.31 178.011,191.392 180.579,191.527C185.58,191.662 189.77,187.742 189.905,182.606L189.905,182.336C189.77,181.119 189.499,179.903 189.094,178.686ZM130.298,78.125L163.413,159.899L113.402,120.431L130.298,78.125Z" style="fill:white;"/>
+ </g>
+</svg>
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000..3a54962
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,16 @@
+# Examples
+
+## Using Ansible runtime
+
+```python title="example.py"
+{!../test/test_runtime_example.py!}
+```
+
+## Access to Ansible configuration
+
+As you may not want to parse `ansible-config dump` yourself, you can make use of
+a simple python class that facilitates access to it, using python data types.
+
+```python
+{!../test/test_configuration_example.py!}
+```
diff --git a/examples/reqs_broken/requirements.yml b/examples/reqs_broken/requirements.yml
new file mode 100644
index 0000000..d55cb09
--- /dev/null
+++ b/examples/reqs_broken/requirements.yml
@@ -0,0 +1,4 @@
+roles: []
+collections: []
+integration_tests_dependencies: [] # <-- invalid key
+unit_tests_dependencies: [] # <-- invalid key
diff --git a/examples/reqs_v1/requirements.yml b/examples/reqs_v1/requirements.yml
new file mode 100644
index 0000000..18693b8
--- /dev/null
+++ b/examples/reqs_v1/requirements.yml
@@ -0,0 +1,4 @@
+# v1 requirements test file
+# ansible-galaxy role install -r requirements.yml -p roles
+- src: git+https://github.com/geerlingguy/ansible-role-docker.git
+ name: geerlingguy.mysql
diff --git a/examples/reqs_v2/community-molecule-0.1.0.tar.gz b/examples/reqs_v2/community-molecule-0.1.0.tar.gz
new file mode 100644
index 0000000..b5af2f0
--- /dev/null
+++ b/examples/reqs_v2/community-molecule-0.1.0.tar.gz
Binary files differ
diff --git a/examples/reqs_v2/requirements.yml b/examples/reqs_v2/requirements.yml
new file mode 100644
index 0000000..190dc39
--- /dev/null
+++ b/examples/reqs_v2/requirements.yml
@@ -0,0 +1,21 @@
+---
+# For local install run:
+# ansible-galaxy role install -r requirements.yml -p roles
+# ansible-galaxy collection install -r requirements.yml -p collections
+#
+# Current test file avoids using galaxy server on purpose, for resiliency
+roles:
+ - src: git+https://github.com/geerlingguy/ansible-role-docker.git
+ name: geerlingguy.mysql
+collections:
+ - name: community-molecule-0.1.0.tar.gz
+ # Also needed for testing purposes as this should trigger addition of --pre
+ # argument as this is required due to
+ # https://github.com/ansible/ansible-lint/issues/3686
+ # https://github.com/ansible/ansible/issues/79109
+ - name: https://github.com/ansible-collections/amazon.aws.git
+ type: git
+ version: main
+ - name: https://github.com/ansible-collections/community.aws.git
+ type: git
+ version: main
diff --git a/mkdocs.yml b/mkdocs.yml
new file mode 100644
index 0000000..0eca7b4
--- /dev/null
+++ b/mkdocs.yml
@@ -0,0 +1,90 @@
+---
+site_name: Ansible Compat Library
+site_url: https://ansible-compat.readthedocs.io/
+repo_url: https://github.com/ansible/ansible-compat
+edit_uri: blob/main/docs/
+copyright: Copyright © 2023 Red Hat, Inc.
+docs_dir: docs
+# strict: true
+watch:
+ - mkdocs.yml
+ - src
+ - docs
+
+theme:
+ name: ansible
+ features:
+ - content.code.copy
+ - content.action.edit
+ - navigation.expand
+ - navigation.sections
+ - navigation.instant
+ - navigation.indexes
+ - navigation.tracking
+ - toc.integrate
+extra:
+ social:
+ - icon: fontawesome/brands/github-alt
+ link: https://github.com/ansible/ansible-compat
+nav:
+ - examples: index.md
+ - api: api.md
+plugins:
+ - autorefs
+ - search
+ - material/social
+ - material/tags
+ - mkdocstrings:
+ handlers:
+ python:
+ import:
+ - https://docs.python.org/3/objects.inv
+ options:
+ # heading_level: 2
+ docstring_style: sphinx
+ docstring_options:
+ ignore_init_summary: yes
+
+ show_submodules: no
+ docstring_section_style: list
+ members_order: alphabetical
+ show_category_heading: no
+ # cannot merge init into class due to parse error...
+ # merge_init_into_class: yes
+ # separate_signature: yes
+ show_root_heading: yes
+ show_signature_annotations: yes
+ separate_signature: yes
+ # show_bases: false
+ # options:
+ # show_root_heading: true
+ # docstring_style: sphinx
+
+markdown_extensions:
+ - markdown_include.include:
+ base_path: docs
+ - admonition
+ - def_list
+ - footnotes
+ - pymdownx.highlight:
+ anchor_linenums: true
+ - pymdownx.inlinehilite
+ - pymdownx.superfences
+ - pymdownx.magiclink:
+ repo_url_shortener: true
+ repo_url_shorthand: true
+ social_url_shorthand: true
+ social_url_shortener: true
+ user: facelessuser
+ repo: pymdown-extensions
+ normalize_issue_symbols: true
+ - pymdownx.tabbed:
+ alternate_style: true
+ - toc:
+ toc_depth: 2
+ permalink: true
+ - pymdownx.superfences:
+ custom_fences:
+ - name: mermaid
+ class: mermaid
+ format: !!python/name:pymdownx.superfences.fence_code_format
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..0e7d843
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,165 @@
+[build-system]
+requires = [
+ "setuptools >= 65.3.0", # required by pyproject+setuptools_scm integration and editable installs
+ "setuptools_scm[toml] >= 7.0.5" # required for "no-local-version" scheme
+]
+build-backend = "setuptools.build_meta"
+
+[project]
+# https://peps.python.org/pep-0621/#readme
+requires-python = ">=3.9"
+dynamic = ["version"]
+name = "ansible-compat"
+description = "Ansible compatibility goodies"
+readme = "README.md"
+authors = [{"name" = "Sorin Sbarnea", "email" = "ssbarnea@redhat.com"}]
+maintainers = [{"name" = "Sorin Sbarnea", "email" = "ssbarnea@redhat.com"}]
+license = {text = "MIT"}
+classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Environment :: Console",
+ "Intended Audience :: Developers",
+ "Intended Audience :: Information Technology",
+ "Intended Audience :: System Administrators",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python",
+ "Topic :: System :: Systems Administration",
+ "Topic :: Software Development :: Bug Tracking",
+ "Topic :: Software Development :: Quality Assurance",
+ "Topic :: Software Development :: Testing",
+ "Topic :: Utilities"
+]
+keywords = ["ansible"]
+dependencies = [
+ # https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html
+ "ansible-core>=2.12",
+ "packaging",
+ "PyYAML",
+ "subprocess-tee>=0.4.1",
+ "jsonschema>=4.6.0",
+ "typing-extensions>=4.5.0;python_version<'3.10'"
+]
+
+[project.optional-dependencies]
+docs = ["argparse-manpage", "black", "mkdocs-ansible[lock]>=0.1.2"]
+test = ["coverage", "pip-tools", "pytest>=7.2.0", "pytest-mock", "pytest-plus>=0.6.1"]
+
+[project.urls]
+homepage = "https://github.com/ansible/ansible-compat"
+documentation = "https://ansible-compat.readthedocs.io/"
+repository = "https://github.com/ansible/ansible-compat"
+changelog = "https://github.com/ansible/ansible-compat/releases"
+
+[tool.coverage.report]
+exclude_lines = ["pragma: no cover", "if TYPE_CHECKING:"]
+fail_under = 92
+skip_covered = true
+show_missing = true
+
+[tool.coverage.run]
+source = ["src"]
+# Do not use branch until bug is fixed:
+# https://github.com/nedbat/coveragepy/issues/605
+branch = false
+parallel = true
+concurrency = ["multiprocessing", "thread"]
+
+[tool.isort]
+profile = "black"
+
+[tool.mypy]
+python_version = 3.9
+color_output = true
+error_summary = true
+disallow_untyped_calls = true
+disallow_untyped_defs = true
+disallow_any_generics = true
+# disallow_any_unimported = True
+# ; warn_redundant_casts = True
+# warn_return_any = True
+# warn_unused_configs = True
+exclude = "test/local-content"
+
+[[tool.mypy.overrides]]
+module = "ansible.*"
+ignore_missing_imports = true
+
+[tool.pylint.BASIC]
+good-names = [
+ "f", # filename
+ "i",
+ "j",
+ "k",
+ "ns", # namespace
+ "ex",
+ "Run",
+ "_"
+]
+
+[tool.pylint.IMPORTS]
+preferred-modules = ["unittest:pytest"]
+
+[tool.pylint."MESSAGES CONTROL"]
+disable = [
+ # On purpose disabled as we rely on black
+ "line-too-long",
+ # local imports do not work well with pre-commit hook
+ "import-error",
+ # already covered by ruff which is faster
+ "too-many-arguments", # PLR0913
+ "raise-missing-from",
+ # Temporary disable duplicate detection we remove old code from prerun
+ "duplicate-code"
+]
+
+[tool.pytest.ini_options]
+# ensure we treat warnings as error
+filterwarnings = [
+ "error",
+ # py312 ansible-core
+ # https://github.com/ansible/ansible/issues/81906
+ "ignore:'importlib.abc.TraversableResources' is deprecated and slated for removal in Python 3.14:DeprecationWarning"
+]
+testpaths = ["test"]
+
+[tool.ruff]
+select = ["ALL"]
+ignore = [
+ # Disabled on purpose:
+ "ANN101", # Missing type annotation for `self` in method
+ "D203", # incompatible with D211
+ "D211",
+ "D213", # incompatible with D212
+ "E501", # we use black
+ "RET504", # Unnecessary variable assignment before `return` statement
+ # Temporary disabled during adoption:
+ "S607", # Starting a process with a partial executable path
+ "PLR0912", # Bug https://github.com/charliermarsh/ruff/issues/4244
+ "PLR0913", # Bug https://github.com/charliermarsh/ruff/issues/4244
+ "RUF012",
+ "PERF203"
+]
+target-version = "py39"
+
+[tool.ruff.flake8-pytest-style]
+parametrize-values-type = "tuple"
+
+[tool.ruff.isort]
+known-first-party = ["ansible_compat"]
+known-third-party = ["packaging"]
+
+[tool.ruff.per-file-ignores]
+"test/**/*.py" = ["SLF001", "S101", "FBT001"]
+
+[tool.ruff.pydocstyle]
+convention = "pep257"
+
+[tool.setuptools_scm]
+local_scheme = "no-local-version"
+write_to = "src/ansible_compat/_version.py"
diff --git a/readthedocs.yml b/readthedocs.yml
new file mode 100644
index 0000000..c3cc744
--- /dev/null
+++ b/readthedocs.yml
@@ -0,0 +1,20 @@
+version: 2
+
+submodules:
+ include: all
+ recursive: true
+
+mkdocs:
+ fail_on_warning: true
+
+build:
+ os: ubuntu-22.04
+ tools:
+ python: "3.11"
+
+python:
+ install:
+ - method: pip
+ path: .
+ extra_requirements:
+ - docs
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..1d4ed20
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,334 @@
+#
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
+#
+# pip-compile --extra=docs --extra=test --output-file=requirements.txt --strip-extras --unsafe-package=ansible-core --unsafe-package=resolvelib --unsafe-package=typing_extensions pyproject.toml
+#
+argparse-manpage==4.5
+ # via ansible-compat (pyproject.toml)
+attrs==23.2.0
+ # via
+ # jsonschema
+ # referencing
+babel==2.13.1
+ # via
+ # mkdocs-ansible
+ # mkdocs-material
+beautifulsoup4==4.12.2
+ # via
+ # linkchecker
+ # mkdocs-ansible
+ # mkdocs-htmlproofer-plugin
+black==23.12.1
+ # via ansible-compat (pyproject.toml)
+build==1.0.3
+ # via pip-tools
+cairocffi==1.6.1
+ # via
+ # cairosvg
+ # mkdocs-ansible
+cairosvg==2.7.1
+ # via mkdocs-ansible
+certifi==2023.11.17
+ # via
+ # mkdocs-ansible
+ # requests
+cffi==1.16.0
+ # via
+ # cairocffi
+ # cryptography
+ # mkdocs-ansible
+charset-normalizer==3.3.2
+ # via
+ # mkdocs-ansible
+ # requests
+click==8.1.7
+ # via
+ # black
+ # mkdocs
+ # mkdocs-ansible
+ # mkdocstrings
+ # pip-tools
+colorama==0.4.6
+ # via
+ # griffe
+ # mkdocs-ansible
+ # mkdocs-material
+coverage==7.4.0
+ # via ansible-compat (pyproject.toml)
+cryptography==41.0.7
+ # via ansible-core
+csscompressor==0.9.5
+ # via
+ # mkdocs-ansible
+ # mkdocs-minify-plugin
+cssselect2==0.7.0
+ # via
+ # cairosvg
+ # mkdocs-ansible
+defusedxml==0.7.1
+ # via
+ # cairosvg
+ # mkdocs-ansible
+dnspython==2.4.2
+ # via
+ # linkchecker
+ # mkdocs-ansible
+exceptiongroup==1.2.0
+ # via pytest
+ghp-import==2.1.0
+ # via
+ # mkdocs
+ # mkdocs-ansible
+griffe==0.38.0
+ # via
+ # mkdocs-ansible
+ # mkdocstrings-python
+htmlmin2==0.1.13
+ # via
+ # mkdocs-ansible
+ # mkdocs-minify-plugin
+idna==3.4
+ # via
+ # mkdocs-ansible
+ # requests
+importlib-metadata==6.8.0
+ # via mkdocs-ansible
+iniconfig==2.0.0
+ # via pytest
+jinja2==3.1.2
+ # via
+ # ansible-core
+ # mkdocs
+ # mkdocs-ansible
+ # mkdocs-material
+ # mkdocstrings
+jsmin==3.0.1
+ # via
+ # mkdocs-ansible
+ # mkdocs-minify-plugin
+jsonschema==4.20.0
+ # via ansible-compat (pyproject.toml)
+jsonschema-specifications==2023.12.1
+ # via jsonschema
+linkchecker==10.3.0
+ # via mkdocs-ansible
+markdown==3.5.1
+ # via
+ # markdown-include
+ # mkdocs
+ # mkdocs-ansible
+ # mkdocs-autorefs
+ # mkdocs-htmlproofer-plugin
+ # mkdocs-material
+ # mkdocstrings
+ # pymdown-extensions
+markdown-exec==1.8.0
+ # via mkdocs-ansible
+markdown-include==0.8.1
+ # via mkdocs-ansible
+markupsafe==2.1.3
+ # via
+ # jinja2
+ # mkdocs
+ # mkdocs-ansible
+ # mkdocstrings
+mergedeep==1.3.4
+ # via
+ # mkdocs
+ # mkdocs-ansible
+mkdocs==1.5.3
+ # via
+ # mkdocs-ansible
+ # mkdocs-autorefs
+ # mkdocs-gen-files
+ # mkdocs-htmlproofer-plugin
+ # mkdocs-material
+ # mkdocs-minify-plugin
+ # mkdocs-monorepo-plugin
+ # mkdocstrings
+mkdocs-ansible==0.2.1
+ # via
+ # ansible-compat (pyproject.toml)
+ # mkdocs-ansible
+mkdocs-autorefs==0.5.0
+ # via
+ # mkdocs-ansible
+ # mkdocstrings
+mkdocs-gen-files==0.5.0
+ # via mkdocs-ansible
+mkdocs-htmlproofer-plugin==1.0.0
+ # via mkdocs-ansible
+mkdocs-material==9.5.3
+ # via mkdocs-ansible
+mkdocs-material-extensions==1.3.1
+ # via
+ # mkdocs-ansible
+ # mkdocs-material
+mkdocs-minify-plugin==0.7.2
+ # via mkdocs-ansible
+mkdocs-monorepo-plugin==1.1.0
+ # via mkdocs-ansible
+mkdocstrings==0.24.0
+ # via
+ # mkdocs-ansible
+ # mkdocstrings-python
+mkdocstrings-python==1.7.5
+ # via mkdocs-ansible
+mypy-extensions==1.0.0
+ # via black
+packaging==23.2
+ # via
+ # ansible-compat (pyproject.toml)
+ # ansible-core
+ # black
+ # build
+ # mkdocs
+ # mkdocs-ansible
+ # pytest
+paginate==0.5.6
+ # via
+ # mkdocs-ansible
+ # mkdocs-material
+pathspec==0.11.2
+ # via
+ # black
+ # mkdocs
+ # mkdocs-ansible
+pillow==10.2.0
+ # via
+ # cairosvg
+ # mkdocs-ansible
+pip==23.3.2
+ # via pip-tools
+pip-tools==7.3.0
+ # via ansible-compat (pyproject.toml)
+pipdeptree==2.13.1
+ # via mkdocs-ansible
+platformdirs==4.0.0
+ # via
+ # black
+ # mkdocs
+ # mkdocs-ansible
+ # mkdocstrings
+pluggy==1.3.0
+ # via pytest
+pycparser==2.21
+ # via
+ # cffi
+ # mkdocs-ansible
+pygments==2.17.1
+ # via
+ # mkdocs-ansible
+ # mkdocs-material
+pymdown-extensions==10.7
+ # via
+ # markdown-exec
+ # mkdocs-ansible
+ # mkdocs-material
+ # mkdocstrings
+pyproject-hooks==1.0.0
+ # via build
+pytest==7.4.4
+ # via
+ # ansible-compat (pyproject.toml)
+ # pytest-mock
+ # pytest-plus
+pytest-mock==3.12.0
+ # via ansible-compat (pyproject.toml)
+pytest-plus==0.6.1
+ # via ansible-compat (pyproject.toml)
+python-dateutil==2.8.2
+ # via
+ # ghp-import
+ # mkdocs-ansible
+python-slugify==8.0.1
+ # via
+ # mkdocs-ansible
+ # mkdocs-monorepo-plugin
+pyyaml==6.0.1
+ # via
+ # ansible-compat (pyproject.toml)
+ # ansible-core
+ # mkdocs
+ # mkdocs-ansible
+ # pymdown-extensions
+ # pyyaml-env-tag
+pyyaml-env-tag==0.1
+ # via
+ # mkdocs
+ # mkdocs-ansible
+referencing==0.32.1
+ # via
+ # jsonschema
+ # jsonschema-specifications
+regex==2023.10.3
+ # via
+ # mkdocs-ansible
+ # mkdocs-material
+requests==2.31.0
+ # via
+ # linkchecker
+ # mkdocs-ansible
+ # mkdocs-htmlproofer-plugin
+ # mkdocs-material
+rpds-py==0.16.2
+ # via
+ # jsonschema
+ # referencing
+setuptools==69.0.3
+ # via pip-tools
+six==1.16.0
+ # via
+ # mkdocs-ansible
+ # python-dateutil
+soupsieve==2.5
+ # via
+ # beautifulsoup4
+ # mkdocs-ansible
+subprocess-tee==0.4.1
+ # via ansible-compat (pyproject.toml)
+text-unidecode==1.3
+ # via
+ # mkdocs-ansible
+ # python-slugify
+tinycss2==1.2.1
+ # via
+ # cairosvg
+ # cssselect2
+ # mkdocs-ansible
+tomli==2.0.1
+ # via
+ # argparse-manpage
+ # black
+ # build
+ # pip-tools
+ # pyproject-hooks
+ # pytest
+typing-extensions==4.8.0
+ # via
+ # black
+ # mkdocs-ansible
+urllib3==2.1.0
+ # via
+ # mkdocs-ansible
+ # requests
+watchdog==3.0.0
+ # via
+ # mkdocs
+ # mkdocs-ansible
+webencodings==0.5.1
+ # via
+ # cssselect2
+ # mkdocs-ansible
+ # tinycss2
+wheel==0.42.0
+ # via pip-tools
+zipp==3.17.0
+ # via
+ # importlib-metadata
+ # mkdocs-ansible
+
+# The following packages are considered to be unsafe in a requirements file:
+# ansible-core
+# resolvelib
diff --git a/src/ansible_compat/__init__.py b/src/ansible_compat/__init__.py
new file mode 100644
index 0000000..b23c8ca
--- /dev/null
+++ b/src/ansible_compat/__init__.py
@@ -0,0 +1,9 @@
+"""ansible_compat package."""
+from importlib.metadata import PackageNotFoundError, version
+
+try:
+    # Resolve the installed distribution version at import time.
+    __version__ = version("ansible-compat")
+except PackageNotFoundError: # pragma: no cover
+    # Fallback when the package is not installed (e.g. running from a
+    # source checkout without `pip install -e .`).
+    __version__ = "0.1.dev1"
+
+__all__ = ["__version__"]
diff --git a/src/ansible_compat/config.py b/src/ansible_compat/config.py
new file mode 100644
index 0000000..a0b41b7
--- /dev/null
+++ b/src/ansible_compat/config.py
@@ -0,0 +1,465 @@
+"""Store configuration options as a singleton."""
+from __future__ import annotations
+
+import ast
+import copy
+import os
+import re
+import subprocess
+from collections import UserDict
+from typing import Literal
+
+from packaging.version import Version
+
+from ansible_compat.constants import ANSIBLE_MIN_VERSION
+from ansible_compat.errors import InvalidPrerequisiteError, MissingAnsibleError
+from ansible_compat.ports import cache
+
+
+# do not use lru_cache here, as environment can change between calls
+def ansible_collections_path() -> str:
+    """Return the name of the collections path environment variable in use.
+
+    Checks the current environment for the singular spelling first, then the
+    plural one (used by older Ansible releases); when neither is set the
+    modern ``ANSIBLE_COLLECTIONS_PATH`` name is returned.
+
+    :returns: Environment variable name, not its value.
+    """
+    for env_var in [
+        "ANSIBLE_COLLECTIONS_PATH",
+        "ANSIBLE_COLLECTIONS_PATHS",
+    ]:
+        if env_var in os.environ:
+            return env_var
+    return "ANSIBLE_COLLECTIONS_PATH"
+
+
+def parse_ansible_version(stdout: str) -> Version:
+    """Parse output of 'ansible --version'.
+
+    :param stdout: Raw standard output captured from the command.
+    :returns: Parsed ansible-core version.
+    :raises InvalidPrerequisiteError: If no version line could be found.
+    """
+    # Ansible can produce extra output before displaying version in debug mode.
+
+    # ansible-core 2.11+: 'ansible [core 2.11.3]'
+    match = re.search(
+        r"^ansible \[(?:core|base) (?P<version>[^\]]+)\]",
+        stdout,
+        re.MULTILINE,
+    )
+    if match:
+        return Version(match.group("version"))
+    msg = f"Unable to parse ansible cli version: {stdout}\nKeep in mind that only {ANSIBLE_MIN_VERSION } or newer are supported."
+    raise InvalidPrerequisiteError(msg)
+
+
+@cache
+def ansible_version(version: str = "") -> Version:
+    """Return current Version object for Ansible.
+
+    If version is not mentioned, it returns the current version as detected.
+    When the version argument is mentioned, it converts that version string
+    to a Version object in order to make it usable in comparisons.
+
+    :param version: Optional version string to convert instead of detecting.
+    :returns: Version object for the requested or detected Ansible version.
+    :raises MissingAnsibleError: If the ``ansible`` executable cannot be run.
+    """
+    if version:
+        return Version(version)
+
+    # Results are cached via @cache, so the subprocess below runs at most
+    # once per distinct argument for the lifetime of the process.
+    proc = subprocess.run(
+        ["ansible", "--version"], # noqa: S603
+        text=True,
+        check=False,
+        capture_output=True,
+    )
+    if proc.returncode != 0:
+        raise MissingAnsibleError(proc=proc)
+
+    return parse_ansible_version(proc.stdout)
+
+
+class AnsibleConfig(UserDict[str, object]): # pylint: disable=too-many-ancestors
+    """Interface to query Ansible configuration.
+
+    This should allow users to access everything provided by `ansible-config
+    dump` without having to parse the data themselves.
+    """
+
+    # Mapping of configuration keys renamed between Ansible releases, so the
+    # old attribute names keep resolving against newer dumps.
+    _aliases = {
+        "COLLECTIONS_PATH": "COLLECTIONS_PATHS", # 2.9 -> 2.10
+    }
+    # Expose some attributes to enable auto-complete in editors, based on
+    # https://docs.ansible.com/ansible/latest/reference_appendices/config.html
+    action_warnings: bool = True
+    agnostic_become_prompt: bool = True
+    allow_world_readable_tmpfiles: bool = False
+    ansible_connection_path: str | None = None
+    ansible_cow_acceptlist: list[str]
+    ansible_cow_path: str | None = None
+    ansible_cow_selection: str = "default"
+    ansible_force_color: bool = False
+    ansible_nocolor: bool = False
+    ansible_nocows: bool = False
+    ansible_pipelining: bool = False
+    any_errors_fatal: bool = False
+    become_allow_same_user: bool = False
+    become_plugin_path: list[str] = [
+        "~/.ansible/plugins/become",
+        "/usr/share/ansible/plugins/become",
+    ]
+    cache_plugin: str = "memory"
+    cache_plugin_connection: str | None = None
+    cache_plugin_prefix: str = "ansible_facts"
+    cache_plugin_timeout: int = 86400
+    callable_accept_list: list[str] = []
+    callbacks_enabled: list[str] = []
+    collections_on_ansible_version_mismatch: Literal["warning", "ignore"] = "warning"
+    collections_paths: list[str] = [
+        "~/.ansible/collections",
+        "/usr/share/ansible/collections",
+    ]
+    collections_scan_sys_path: bool = True
+    color_changed: str = "yellow"
+    color_console_prompt: str = "white"
+    color_debug: str = "dark gray"
+    color_deprecate: str = "purple"
+    color_diff_add: str = "green"
+    color_diff_lines: str = "cyan"
+    color_diff_remove: str = "red"
+    color_error: str = "red"
+    color_highlight: str = "white"
+    color_ok: str = "green"
+    color_skip: str = "cyan"
+    color_unreachable: str = "bright red"
+    color_verbose: str = "blue"
+    color_warn: str = "bright purple"
+    command_warnings: bool = False
+    conditional_bare_vars: bool = False
+    connection_facts_modules: dict[str, str]
+    controller_python_warning: bool = True
+    coverage_remote_output: str | None
+    coverage_remote_paths: list[str]
+    default_action_plugin_path: list[str] = [
+        "~/.ansible/plugins/action",
+        "/usr/share/ansible/plugins/action",
+    ]
+    default_allow_unsafe_lookups: bool = False
+    default_ask_pass: bool = False
+    default_ask_vault_pass: bool = False
+    default_become: bool = False
+    default_become_ask_pass: bool = False
+    default_become_exe: str | None = None
+    default_become_flags: str
+    default_become_method: str = "sudo"
+    default_become_user: str = "root"
+    default_cache_plugin_path: list[str] = [
+        "~/.ansible/plugins/cache",
+        "/usr/share/ansible/plugins/cache",
+    ]
+    default_callback_plugin_path: list[str] = [
+        "~/.ansible/plugins/callback",
+        "/usr/share/ansible/plugins/callback",
+    ]
+    default_cliconf_plugin_path: list[str] = [
+        "~/.ansible/plugins/cliconf",
+        "/usr/share/ansible/plugins/cliconf",
+    ]
+    default_connection_plugin_path: list[str] = [
+        "~/.ansible/plugins/connection",
+        "/usr/share/ansible/plugins/connection",
+    ]
+    default_debug: bool = False
+    default_executable: str = "/bin/sh"
+    default_fact_path: str | None = None
+    default_filter_plugin_path: list[str] = [
+        "~/.ansible/plugins/filter",
+        "/usr/share/ansible/plugins/filter",
+    ]
+    default_force_handlers: bool = False
+    default_forks: int = 5
+    default_gathering: Literal["smart", "explicit", "implicit"] = "smart"
+    default_gather_subset: list[str] = ["all"]
+    default_gather_timeout: int = 10
+    default_handler_includes_static: bool = False
+    default_hash_behaviour: str = "replace"
+    default_host_list: list[str] = ["/etc/ansible/hosts"]
+    default_httpapi_plugin_path: list[str] = [
+        "~/.ansible/plugins/httpapi",
+        "/usr/share/ansible/plugins/httpapi",
+    ]
+    default_internal_poll_interval: float = 0.001
+    default_inventory_plugin_path: list[str] = [
+        "~/.ansible/plugins/inventory",
+        "/usr/share/ansible/plugins/inventory",
+    ]
+    default_jinja2_extensions: list[str] = []
+    default_jinja2_native: bool = False
+    default_keep_remote_files: bool = False
+    default_libvirt_lxc_noseclabel: bool = False
+    default_load_callback_plugins: bool = False
+    default_local_tmp: str = "~/.ansible/tmp"
+    default_log_filter: list[str] = []
+    default_log_path: str | None = None
+    default_lookup_lugin_path: list[str] = [
+        "~/.ansible/plugins/lookup",
+        "/usr/share/ansible/plugins/lookup",
+    ]
+    default_managed_str: str = "Ansible managed"
+    default_module_args: str
+    default_module_compression: str = "ZIP_DEFLATED"
+    default_module_name: str = "command"
+    default_module_path: list[str] = [
+        "~/.ansible/plugins/modules",
+        "/usr/share/ansible/plugins/modules",
+    ]
+    default_module_utils_path: list[str] = [
+        "~/.ansible/plugins/module_utils",
+        "/usr/share/ansible/plugins/module_utils",
+    ]
+    default_netconf_plugin_path: list[str] = [
+        "~/.ansible/plugins/netconf",
+        "/usr/share/ansible/plugins/netconf",
+    ]
+    default_no_log: bool = False
+    default_no_target_syslog: bool = False
+    default_null_representation: str | None = None
+    default_poll_interval: int = 15
+    default_private_key_file: str | None = None
+    default_private_role_vars: bool = False
+    default_remote_port: str | None = None
+    default_remote_user: str | None = None
+    # https://docs.ansible.com/ansible/latest/reference_appendices/config.html#collections-paths
+    default_collections_path: list[str] = [
+        "~/.ansible/collections",
+        "/usr/share/ansible/collections",
+    ]
+    default_roles_path: list[str] = [
+        "~/.ansible/roles",
+        "/usr/share/ansible/roles",
+        "/etc/ansible/roles",
+    ]
+    default_selinux_special_fs: list[str] = [
+        "fuse",
+        "nfs",
+        "vboxsf",
+        "ramfs",
+        "9p",
+        "vfat",
+    ]
+    default_stdout_callback: str = "default"
+    default_strategy: str = "linear"
+    default_strategy_plugin_path: list[str] = [
+        "~/.ansible/plugins/strategy",
+        "/usr/share/ansible/plugins/strategy",
+    ]
+    default_su: bool = False
+    default_syslog_facility: str = "LOG_USER"
+    default_task_includes_static: bool = False
+    default_terminal_plugin_path: list[str] = [
+        "~/.ansible/plugins/terminal",
+        "/usr/share/ansible/plugins/terminal",
+    ]
+    default_test_plugin_path: list[str] = [
+        "~/.ansible/plugins/test",
+        "/usr/share/ansible/plugins/test",
+    ]
+    default_timeout: int = 10
+    default_transport: str = "smart"
+    default_undefined_var_behavior: bool = True
+    default_vars_plugin_path: list[str] = [
+        "~/.ansible/plugins/vars",
+        "/usr/share/ansible/plugins/vars",
+    ]
+    default_vault_encrypt_identity: str | None = None
+    default_vault_identity: str = "default"
+    default_vault_identity_list: list[str] = []
+    default_vault_id_match: bool = False
+    default_vault_password_file: str | None = None
+    default_verbosity: int = 0
+    deprecation_warnings: bool = False
+    devel_warning: bool = True
+    diff_always: bool = False
+    diff_context: int = 3
+    display_args_to_stdout: bool = False
+    display_skipped_hosts: bool = True
+    docsite_root_url: str = "https://docs.ansible.com/ansible/"
+    doc_fragment_plugin_path: list[str] = [
+        "~/.ansible/plugins/doc_fragments",
+        "/usr/share/ansible/plugins/doc_fragments",
+    ]
+    duplicate_yaml_dict_key: Literal["warn", "error", "ignore"] = "warn"
+    enable_task_debugger: bool = False
+    error_on_missing_handler: bool = True
+    facts_modules: list[str] = ["smart"]
+    galaxy_cache_dir: str = "~/.ansible/galaxy_cache"
+    galaxy_display_progress: str | None = None
+    galaxy_ignore_certs: bool = False
+    galaxy_role_skeleton: str | None = None
+    galaxy_role_skeleton_ignore: list[str] = ["^.git$", "^.*/.git_keep$"]
+    galaxy_server: str = "https://galaxy.ansible.com"
+    galaxy_server_list: str | None = None
+    galaxy_token_path: str = "~/.ansible/galaxy_token"
+    host_key_checking: bool = True
+    host_pattern_mismatch: Literal["warning", "error", "ignore"] = "warning"
+    inject_facts_as_vars: bool = True
+    interpreter_python: str = "auto_legacy"
+    interpreter_python_distro_map: dict[str, str]
+    interpreter_python_fallback: list[str]
+    invalid_task_attribute_failed: bool = True
+    inventory_any_unparsed_is_failed: bool = False
+    inventory_cache_enabled: bool = False
+    inventory_cache_plugin: str | None = None
+    inventory_cache_plugin_connection: str | None = None
+    inventory_cache_plugin_prefix: str = "ansible_facts"
+    inventory_cache_timeout: int = 3600
+    inventory_enabled: list[str] = [
+        "host_list",
+        "script",
+        "auto",
+        "yaml",
+        "ini",
+        "toml",
+    ]
+    inventory_export: bool = False
+    inventory_ignore_exts: str
+    inventory_ignore_patterns: list[str] = []
+    inventory_unparsed_is_failed: bool = False
+    localhost_warning: bool = True
+    max_file_size_for_diff: int = 104448
+    module_ignore_exts: str
+    netconf_ssh_config: str | None = None
+    network_group_modules: list[str] = [
+        "eos",
+        "nxos",
+        "ios",
+        "iosxr",
+        "junos",
+        "enos",
+        "ce",
+        "vyos",
+        "sros",
+        "dellos9",
+        "dellos10",
+        "dellos6",
+        "asa",
+        "aruba",
+        "aireos",
+        "bigip",
+        "ironware",
+        "onyx",
+        "netconf",
+        "exos",
+        "voss",
+        "slxos",
+    ]
+    old_plugin_cache_clearing: bool = False
+    paramiko_host_key_auto_add: bool = False
+    paramiko_look_for_keys: bool = True
+    persistent_command_timeout: int = 30
+    persistent_connect_retry_timeout: int = 15
+    persistent_connect_timeout: int = 30
+    persistent_control_path_dir: str = "~/.ansible/pc"
+    playbook_dir: str | None
+    playbook_vars_root: Literal["top", "bottom", "all"] = "top"
+    plugin_filters_cfg: str | None = None
+    python_module_rlimit_nofile: int = 0
+    retry_files_enabled: bool = False
+    retry_files_save_path: str | None = None
+    run_vars_plugins: str = "demand"
+    show_custom_stats: bool = False
+    string_conversion_action: Literal["warn", "error", "ignore"] = "warn"
+    string_type_filters: list[str] = [
+        "string",
+        "to_json",
+        "to_nice_json",
+        "to_yaml",
+        "to_nice_yaml",
+        "ppretty",
+        "json",
+    ]
+    system_warnings: bool = True
+    tags_run: list[str] = []
+    tags_skip: list[str] = []
+    task_debugger_ignore_errors: bool = True
+    task_timeout: int = 0
+    transform_invalid_group_chars: Literal[
+        "always",
+        "never",
+        "ignore",
+        "silently",
+    ] = "never"
+    use_persistent_connections: bool = False
+    variable_plugins_enabled: list[str] = ["host_group_vars"]
+    variable_precedence: list[str] = [
+        "all_inventory",
+        "groups_inventory",
+        "all_plugins_inventory",
+        "all_plugins_play",
+        "groups_plugins_inventory",
+        "groups_plugins_play",
+    ]
+    verbose_to_stderr: bool = False
+    win_async_startup_timeout: int = 5
+    worker_shutdown_poll_count: int = 0
+    worker_shutdown_poll_delay: float = 0.1
+    yaml_filename_extensions: list[str] = [".yml", ".yaml", ".json"]
+
+    def __init__(
+        self,
+        config_dump: str | None = None,
+        data: dict[str, object] | None = None,
+    ) -> None:
+        """Load config dictionary.
+
+        :param config_dump: Optional raw output of ``ansible-config dump``;
+            when absent the command is executed to obtain it.
+        :param data: Optional pre-parsed configuration mapping; when given it
+            is deep-copied and no subprocess is run.
+        """
+        super().__init__()
+
+        if data:
+            self.data = copy.deepcopy(data)
+            return
+
+        if not config_dump:
+            env = os.environ.copy()
+            # Avoid possible ANSI garbage
+            env["ANSIBLE_FORCE_COLOR"] = "0"
+            config_dump = subprocess.check_output(
+                ["ansible-config", "dump"], # noqa: S603
+                universal_newlines=True,
+                env=env,
+            )
+
+        # Each dump line looks like 'KEY(...) = value'; values are stored as
+        # Python literals where possible, otherwise as plain strings.
+        for match in re.finditer(
+            r"^(?P<key>[A-Za-z0-9_]+).* = (?P<value>.*)$",
+            config_dump,
+            re.MULTILINE,
+        ):
+            key = match.groupdict()["key"]
+            value = match.groupdict()["value"]
+            try:
+                self[key] = ast.literal_eval(value)
+            except (NameError, SyntaxError, ValueError):
+                # Not a valid Python literal, keep the raw string.
+                self[key] = value
+
+    def __getattribute__(self, attr_name: str) -> object:
+        """Allow access of config options as attributes.
+
+        Lookup order: instance ``__dict__``, the parsed config data (using
+        the upper-cased attribute name, then any known alias), and finally
+        the class attributes which double as documented defaults.
+        """
+        _dict = super().__dict__ # pylint: disable=no-member
+        if attr_name in _dict:
+            return _dict[attr_name]
+
+        data = super().__getattribute__("data")
+        if attr_name == "data": # pragma: no cover
+            return data
+
+        name = attr_name.upper()
+        if name in data:
+            return data[name]
+        if name in AnsibleConfig._aliases:
+            return data[AnsibleConfig._aliases[name]]
+
+        return super().__getattribute__(attr_name)
+
+    def __getitem__(self, name: str) -> object:
+        """Allow access to config options using indexing (case-insensitive)."""
+        return super().__getitem__(name.upper())
+
+    def __copy__(self) -> AnsibleConfig:
+        """Allow users to run copy on Config."""
+        return AnsibleConfig(data=self.data)
+
+    def __deepcopy__(self, memo: object) -> AnsibleConfig:
+        """Allow users to run deepcopy on Config."""
+        return AnsibleConfig(data=self.data)
+
+
+__all__ = [
+ "ansible_collections_path",
+ "parse_ansible_version",
+ "ansible_version",
+ "AnsibleConfig",
+]
diff --git a/src/ansible_compat/constants.py b/src/ansible_compat/constants.py
new file mode 100644
index 0000000..f3d7866
--- /dev/null
+++ b/src/ansible_compat/constants.py
@@ -0,0 +1,42 @@
+"""Constants used by ansible_compat."""
+
+from pathlib import Path
+
+# Candidate locations of a role metadata file, in preference order.
+META_MAIN = (Path("meta") / Path("main.yml"), Path("meta") / Path("main.yaml"))
+# Relative paths that are searched for galaxy requirements files.
+REQUIREMENT_LOCATIONS = [
+    "requirements.yml",
+    "roles/requirements.yml",
+    "collections/requirements.yml",
+    # These are more or less the official ways to store test requirements in collections so far, the comments show the number of repos using each location as reported by https://sourcegraph.com/ at the time of writing
+    "tests/requirements.yml", # 170
+    "tests/integration/requirements.yml", # 3
+    "tests/unit/requirements.yml", # 1
+]
+
+# Minimal version of Ansible we support for runtime
+ANSIBLE_MIN_VERSION = "2.12"
+
+# Based on https://docs.ansible.com/ansible/latest/reference_appendices/config.html
+ANSIBLE_DEFAULT_ROLES_PATH = (
+    "~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles"
+)
+
+# Process exit codes used to signal specific failure classes.
+INVALID_CONFIG_RC = 2
+ANSIBLE_MISSING_RC = 4
+INVALID_PREREQUISITES_RC = 10
+
+# Error message displayed when a role's fully qualified name is invalid.
+MSG_INVALID_FQRL = """\
+Computed fully qualified role name of {0} does not follow current galaxy requirements.
+Please edit meta/main.yml and assure we can correctly determine full role name:
+
+galaxy_info:
+role_name: my_name # if absent directory name hosting role is used instead
+namespace: my_galaxy_namespace # if absent, author is used instead
+
+Namespace: https://galaxy.ansible.com/docs/contributing/namespaces.html#galaxy-namespace-limitations
+Role: https://galaxy.ansible.com/docs/contributing/creating_role.html#role-names
+
+As an alternative, you can add 'role-name' to either skip_list or warn_list.
+"""
+
+RC_ANSIBLE_OPTIONS_ERROR = 5
diff --git a/src/ansible_compat/errors.py b/src/ansible_compat/errors.py
new file mode 100644
index 0000000..6369412
--- /dev/null
+++ b/src/ansible_compat/errors.py
@@ -0,0 +1,57 @@
+"""Module to deal with errors."""
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from ansible_compat.constants import ANSIBLE_MISSING_RC, INVALID_PREREQUISITES_RC
+
+if TYPE_CHECKING:
+ from subprocess import CompletedProcess
+
+
+class AnsibleCompatError(RuntimeError):
+    """Generic error originating from ansible_compat library."""
+
+    # Process exit code associated with this class of error.
+    code = 1 # generic error
+
+    def __init__(
+        self,
+        message: str | None = None,
+        proc: CompletedProcess[Any] | None = None,
+    ) -> None:
+        """Construct generic library exception.
+
+        :param message: Human readable description of the failure.
+        :param proc: Optional completed process that triggered the error.
+        """
+        super().__init__(message)
+        # Keep the failed process around so callers can inspect its output.
+        self.proc = proc
+
+
+class AnsibleCommandError(RuntimeError):
+    """Exception running an Ansible command."""
+
+    def __init__(self, proc: CompletedProcess[Any]) -> None:
+        """Construct an exception given a completed process.
+
+        :param proc: Completed process whose non-zero exit code triggered
+            this error; its args are embedded in the message.
+        """
+        message = (
+            f"Got {proc.returncode} exit code while running: {' '.join(proc.args)}"
+        )
+        super().__init__(message)
+        # Keep the failed process around so callers can inspect its output.
+        self.proc = proc
+
+
+class MissingAnsibleError(AnsibleCompatError):
+    """Reports a missing or broken Ansible installation."""
+
+    # Process exit code associated with this class of error.
+    code = ANSIBLE_MISSING_RC
+
+    def __init__(
+        self,
+        message: str | None = "Unable to find a working copy of ansible executable.",
+        proc: CompletedProcess[Any] | None = None,
+    ) -> None:
+        """Construct the exception.
+
+        :param message: Human readable description of the failure.
+        :param proc: Optional completed process that revealed the problem.
+        """
+        super().__init__(message)
+        self.proc = proc
+
+
+class InvalidPrerequisiteError(AnsibleCompatError):
+    """Reports a missing requirement."""
+
+    # Process exit code associated with this class of error.
+    code = INVALID_PREREQUISITES_RC
diff --git a/src/ansible_compat/loaders.py b/src/ansible_compat/loaders.py
new file mode 100644
index 0000000..d2ae080
--- /dev/null
+++ b/src/ansible_compat/loaders.py
@@ -0,0 +1,30 @@
+"""Utilities for loading various files."""
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+import yaml
+
+from ansible_compat.errors import InvalidPrerequisiteError
+
+if TYPE_CHECKING:
+ from pathlib import Path
+
+
+def yaml_from_file(path: Path) -> Any: # noqa: ANN401
+    """Return a loaded YAML file.
+
+    :param path: Path of the YAML file to read (UTF-8 encoded).
+    :returns: Parsed content; type depends on the document (dict, list, ...).
+    """
+    with path.open(encoding="utf-8") as content:
+        # SafeLoader avoids executing arbitrary tags from untrusted files.
+        return yaml.load(content, Loader=yaml.SafeLoader)
+
+
+def colpath_from_path(path: Path) -> str | None:
+    """Return a 'namespace/name' collection path derived from galaxy.yml.
+
+    :param path: Directory that may contain a ``galaxy.yml`` file.
+    :returns: Slash-separated ``namespace/name`` string, or ``None`` when no
+        ``galaxy.yml`` exists in the given directory.
+    :raises InvalidPrerequisiteError: If galaxy.yml lacks a mandatory field.
+    """
+    galaxy_file = path / "galaxy.yml"
+    if galaxy_file.exists():
+        galaxy = yaml_from_file(galaxy_file)
+        for k in ("namespace", "name"):
+            if k not in galaxy:
+                msg = f"{galaxy_file} is missing the following mandatory field {k}"
+                raise InvalidPrerequisiteError(msg)
+        return f"{galaxy['namespace']}/{galaxy['name']}"
+    return None
diff --git a/src/ansible_compat/ports.py b/src/ansible_compat/ports.py
new file mode 100644
index 0000000..9c46ae6
--- /dev/null
+++ b/src/ansible_compat/ports.py
@@ -0,0 +1,4 @@
+"""Portability helpers."""
+from functools import cache, cached_property
+
+__all__ = ["cache", "cached_property"]
diff --git a/src/ansible_compat/prerun.py b/src/ansible_compat/prerun.py
new file mode 100644
index 0000000..6dfa44f
--- /dev/null
+++ b/src/ansible_compat/prerun.py
@@ -0,0 +1,21 @@
+"""Utilities for configuring ansible runtime environment."""
+import hashlib
+import os
+from pathlib import Path
+
+
def get_cache_dir(project_dir: Path) -> Path:
    """Compute cache directory to be used based on project path."""
    # Only the directory basename feeds the key, so the same cache location
    # is reused regardless of the user home directory or where the project
    # is cloned (as long as the project folder keeps the same name).
    digest_source = project_dir.resolve().name.encode(encoding="utf-8")
    # 6 chars of entropy should be enough
    cache_key = hashlib.sha256(digest_source).hexdigest()[:6]
    xdg_cache = Path(os.getenv("XDG_CACHE_HOME", "~/.cache")).expanduser()
    return xdg_cache / "ansible-compat" / cache_key
diff --git a/src/ansible_compat/py.typed b/src/ansible_compat/py.typed
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/ansible_compat/py.typed
diff --git a/src/ansible_compat/runtime.py b/src/ansible_compat/runtime.py
new file mode 100644
index 0000000..ad81132
--- /dev/null
+++ b/src/ansible_compat/runtime.py
@@ -0,0 +1,961 @@
+"""Ansible runtime environment manager."""
+from __future__ import annotations
+
+import contextlib
+import importlib
+import json
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import warnings
+from collections import OrderedDict
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Callable, no_type_check
+
+import subprocess_tee
+from packaging.version import Version
+
+from ansible_compat.config import (
+ AnsibleConfig,
+ ansible_collections_path,
+ ansible_version,
+ parse_ansible_version,
+)
+from ansible_compat.constants import (
+ META_MAIN,
+ MSG_INVALID_FQRL,
+ RC_ANSIBLE_OPTIONS_ERROR,
+ REQUIREMENT_LOCATIONS,
+)
+from ansible_compat.errors import (
+ AnsibleCommandError,
+ AnsibleCompatError,
+ InvalidPrerequisiteError,
+ MissingAnsibleError,
+)
+from ansible_compat.loaders import colpath_from_path, yaml_from_file
+from ansible_compat.prerun import get_cache_dir
+
+if TYPE_CHECKING:
+ # https://github.com/PyCQA/pylint/issues/3240
+ # pylint: disable=unsubscriptable-object
+ CompletedProcess = subprocess.CompletedProcess[Any]
+else:
+ CompletedProcess = subprocess.CompletedProcess
+
+
+_logger = logging.getLogger(__name__)
+# regex to extract the first version from a collection range specifier
+version_re = re.compile(":[>=<]*([^,]*)")
+namespace_re = re.compile("^[a-z][a-z0-9_]+$")
+
+
class AnsibleWarning(Warning):
    """Warnings related to Ansible runtime.

    Emitted via the monkey-patched ``Display.warning`` so that warnings
    produced by ansible-core surface through Python's ``warnings`` machinery.
    """
+
+
@dataclass
class Collection:
    """Container for Ansible collection information."""

    # Fully qualified collection name as reported by
    # `ansible-galaxy collection list` (dict key in its JSON output).
    name: str
    # Installed version string taken from the listing output.
    version: str
    # Location the collection was found under.
    # NOTE(review): load_collections passes the raw str key here despite the
    # Path annotation -- confirm before relying on Path-only methods.
    path: Path
+
+
class CollectionVersion(Version):
    """Collection version."""

    def __init__(self, version: str) -> None:
        """Initialize collection version.

        The wildcard "*" is not understood by packaging's Version class, so
        it is mapped to "0", the smallest version possible.
        """
        super().__init__("0" if version == "*" else version)
+
+
@dataclass
class Plugins:  # pylint: disable=too-many-instance-attributes
    """Dataclass to access installed Ansible plugins, uses ansible-doc to retrieve them."""

    # Runtime used to invoke ansible-doc when a plugin map is first accessed.
    runtime: Runtime
    # Each field below maps plugin name -> short description for one plugin
    # type; values are populated lazily by __getattribute__, never by __init__.
    become: dict[str, str] = field(init=False)
    cache: dict[str, str] = field(init=False)
    callback: dict[str, str] = field(init=False)
    cliconf: dict[str, str] = field(init=False)
    connection: dict[str, str] = field(init=False)
    httpapi: dict[str, str] = field(init=False)
    inventory: dict[str, str] = field(init=False)
    lookup: dict[str, str] = field(init=False)
    netconf: dict[str, str] = field(init=False)
    shell: dict[str, str] = field(init=False)
    vars: dict[str, str] = field(init=False)  # noqa: A003
    module: dict[str, str] = field(init=False)
    strategy: dict[str, str] = field(init=False)
    test: dict[str, str] = field(init=False)
    filter: dict[str, str] = field(init=False)  # noqa: A003
    role: dict[str, str] = field(init=False)
    keyword: dict[str, str] = field(init=False)

    @no_type_check
    def __getattribute__(self, attr: str):  # noqa: ANN204
        """Return an attribute, querying ansible-doc on first plugin access.

        For the plugin-type attributes listed below, an AttributeError (the
        field was never assigned) triggers an `ansible-doc --json -l -t <attr>`
        run whose parsed output becomes the result.
        NOTE(review): the fetched dict is not cached back onto the instance,
        so every access re-runs ansible-doc -- confirm if that is intended.
        """
        if attr in {
            "become",
            "cache",
            "callback",
            "cliconf",
            "connection",
            "httpapi",
            "inventory",
            "lookup",
            "netconf",
            "shell",
            "vars",
            "module",
            "strategy",
            "test",
            "filter",
            "role",
            "keyword",
        }:
            try:
                result = super().__getattribute__(attr)
            except AttributeError as exc:
                # filter/test listing requires ansible-doc from core 2.14+.
                if ansible_version() < Version("2.14") and attr in {"filter", "test"}:
                    msg = "Ansible version below 2.14 does not support retrieving filter and test plugins."
                    raise RuntimeError(msg) from exc
                proc = self.runtime.run(
                    ["ansible-doc", "--json", "-l", "-t", attr],
                )
                data = json.loads(proc.stdout)
                if not isinstance(data, dict):  # pragma: no cover
                    msg = "Unexpected output from ansible-doc"
                    raise AnsibleCompatError(msg) from exc
                result = data
        else:
            # Non-plugin attributes (e.g. runtime) use normal lookup.
            result = super().__getattribute__(attr)

        return result
+
+
+# pylint: disable=too-many-instance-attributes
+class Runtime:
+ """Ansible Runtime manager."""
+
+ _version: Version | None = None
+ collections: OrderedDict[str, Collection] = OrderedDict()
+ cache_dir: Path | None = None
+    # Used to track if we have already initialized the Ansible runtime as attempts
+    # to do it multiple times will cause runtime warnings from within ansible-core
+ initialized: bool = False
+ plugins: Plugins
+
+ def __init__(
+ self,
+ project_dir: Path | None = None,
+ *,
+ isolated: bool = False,
+ min_required_version: str | None = None,
+ require_module: bool = False,
+ max_retries: int = 0,
+ environ: dict[str, str] | None = None,
+ verbosity: int = 0,
+ ) -> None:
+ """Initialize Ansible runtime environment.
+
+ :param project_dir: The directory containing the Ansible project. If
+ not mentioned it will be guessed from the current
+ working directory.
+ :param isolated: Assure that installation of collections or roles
+ does not affect Ansible installation, an unique cache
+ directory being used instead.
+ :param min_required_version: Minimal version of Ansible required. If
+ not found, a :class:`RuntimeError`
+ exception is raised.
+ :param require_module: If set, instantiation will fail if Ansible
+ Python module is missing or is not matching
+ the same version as the Ansible command line.
+ That is useful for consumers that expect to
+ also perform Python imports from Ansible.
+ :param max_retries: Number of times it should retry network operations.
+ Default is 0, no retries.
+ :param environ: Environment dictionary to use, if undefined
+ ``os.environ`` will be copied and used.
+ :param verbosity: Verbosity level to use.
+ """
+ self.project_dir = project_dir or Path.cwd()
+ self.isolated = isolated
+ self.max_retries = max_retries
+ self.environ = environ or os.environ.copy()
+ self.plugins = Plugins(runtime=self)
+ self.verbosity = verbosity
+
+ self.initialize_logger(level=self.verbosity)
+
+ # Reduce noise from paramiko, unless user already defined PYTHONWARNINGS
+ # paramiko/transport.py:236: CryptographyDeprecationWarning: Blowfish has been deprecated
+ # https://github.com/paramiko/paramiko/issues/2038
+ # As CryptographyDeprecationWarning is not a builtin, we cannot use
+ # PYTHONWARNINGS to ignore it using category but we can use message.
+ # https://stackoverflow.com/q/68251969/99834
+ if "PYTHONWARNINGS" not in self.environ: # pragma: no cover
+ self.environ["PYTHONWARNINGS"] = "ignore:Blowfish has been deprecated"
+
+ if isolated:
+ self.cache_dir = get_cache_dir(self.project_dir)
+ self.config = AnsibleConfig()
+
+ # Add the sys.path to the collection paths if not isolated
+ self._add_sys_path_to_collection_paths()
+
+ if not self.version_in_range(lower=min_required_version):
+ msg = f"Found incompatible version of ansible runtime {self.version}, instead of {min_required_version} or newer."
+ raise RuntimeError(msg)
+ if require_module:
+ self._ensure_module_available()
+
+ # pylint: disable=import-outside-toplevel
+ from ansible.utils.display import Display
+
+ # pylint: disable=unused-argument
+ def warning(
+ self: Display, # noqa: ARG001
+ msg: str,
+ *,
+ formatted: bool = False, # noqa: ARG001
+ ) -> None:
+ """Override ansible.utils.display.Display.warning to avoid printing warnings."""
+ warnings.warn(
+ message=msg,
+ category=AnsibleWarning,
+ stacklevel=2,
+ source={"msg": msg},
+ )
+
+ # Monkey patch ansible warning in order to use warnings module.
+ Display.warning = warning
+
+ def initialize_logger(self, level: int = 0) -> None:
+ """Set up the global logging level based on the verbosity number."""
+ verbosity_map = {
+ -2: logging.CRITICAL,
+ -1: logging.ERROR,
+ 0: logging.WARNING,
+ 1: logging.INFO,
+ 2: logging.DEBUG,
+ }
+ # Unknown logging level is treated as DEBUG
+ logging_level = verbosity_map.get(level, logging.DEBUG)
+ _logger.setLevel(logging_level)
+ # Use module-level _logger instance to validate it
+ _logger.debug("Logging initialized to level %s", logging_level)
+
+ def _add_sys_path_to_collection_paths(self) -> None:
+ """Add the sys.path to the collection paths."""
+ if self.config.collections_scan_sys_path:
+ for path in sys.path:
+ if (
+ path not in self.config.collections_paths
+ and (Path(path) / "ansible_collections").is_dir()
+ ):
+ self.config.collections_paths.append( # pylint: disable=E1101
+ path,
+ )
+
+ def load_collections(self) -> None:
+ """Load collection data."""
+ self.collections = OrderedDict()
+ no_collections_msg = "None of the provided paths were usable"
+
+ proc = self.run(["ansible-galaxy", "collection", "list", "--format=json"])
+ if proc.returncode == RC_ANSIBLE_OPTIONS_ERROR and (
+ no_collections_msg in proc.stdout or no_collections_msg in proc.stderr
+ ):
+ _logger.debug("Ansible reported no installed collections at all.")
+ return
+ if proc.returncode != 0:
+ _logger.error(proc)
+ msg = f"Unable to list collections: {proc}"
+ raise RuntimeError(msg)
+ data = json.loads(proc.stdout)
+ if not isinstance(data, dict):
+ msg = f"Unexpected collection data, {data}"
+ raise TypeError(msg)
+ for path in data:
+ for collection, collection_info in data[path].items():
+ if not isinstance(collection, str):
+ msg = f"Unexpected collection data, {collection}"
+ raise TypeError(msg)
+ if not isinstance(collection_info, dict):
+ msg = f"Unexpected collection data, {collection_info}"
+ raise TypeError(msg)
+
+ self.collections[collection] = Collection(
+ name=collection,
+ version=collection_info["version"],
+ path=path,
+ )
+
+ def _ensure_module_available(self) -> None:
+ """Assure that Ansible Python module is installed and matching CLI version."""
+ ansible_release_module = None
+ with contextlib.suppress(ModuleNotFoundError, ImportError):
+ ansible_release_module = importlib.import_module("ansible.release")
+
+ if ansible_release_module is None:
+ msg = "Unable to find Ansible python module."
+ raise RuntimeError(msg)
+
+ ansible_module_version = Version(
+ ansible_release_module.__version__,
+ )
+ if ansible_module_version != self.version:
+ msg = f"Ansible CLI ({self.version}) and python module ({ansible_module_version}) versions do not match. This indicates a broken execution environment."
+ raise RuntimeError(msg)
+
+ # For ansible 2.15+ we need to initialize the plugin loader
+ # https://github.com/ansible/ansible-lint/issues/2945
+ if not Runtime.initialized:
+ col_path = [f"{self.cache_dir}/collections"]
+ if self.version >= Version("2.15.0.dev0"):
+ # pylint: disable=import-outside-toplevel,no-name-in-module
+ from ansible.plugins.loader import init_plugin_loader
+
+ init_plugin_loader(col_path)
+ else:
+ # noinspection PyProtectedMember
+ from ansible.utils.collection_loader._collection_finder import ( # pylint: disable=import-outside-toplevel
+ _AnsibleCollectionFinder,
+ )
+
+ # noinspection PyProtectedMember
+ # pylint: disable=protected-access
+ col_path += self.config.collections_paths
+ col_path += os.path.dirname( # noqa: PTH120
+ os.environ.get(ansible_collections_path(), "."),
+ ).split(":")
+ _AnsibleCollectionFinder( # noqa: SLF001
+ paths=col_path,
+ )._install() # pylint: disable=protected-access
+ Runtime.initialized = True
+
    def clean(self) -> None:
        """Remove content of cache_dir.

        No-op when no cache directory was configured (non-isolated mode).
        """
        # ignore_errors: a missing or partially-removed tree is acceptable.
        if self.cache_dir:
            shutil.rmtree(self.cache_dir, ignore_errors=True)
+
+ def run( # ruff: disable=PLR0913
+ self,
+ args: str | list[str],
+ *,
+ retry: bool = False,
+ tee: bool = False,
+ env: dict[str, str] | None = None,
+ cwd: Path | None = None,
+ ) -> CompletedProcess:
+ """Execute a command inside an Ansible environment.
+
+ :param retry: Retry network operations on failures.
+ :param tee: Also pass captured stdout/stderr to system while running.
+ """
+ if tee:
+ run_func: Callable[..., CompletedProcess] = subprocess_tee.run
+ else:
+ run_func = subprocess.run
+ env = self.environ if env is None else env.copy()
+ # Presence of ansible debug variable or config option will prevent us
+ # from parsing its JSON output due to extra debug messages on stdout.
+ env["ANSIBLE_DEBUG"] = "0"
+
+ # https://github.com/ansible/ansible-lint/issues/3522
+ env["ANSIBLE_VERBOSE_TO_STDERR"] = "True"
+
+ for _ in range(self.max_retries + 1 if retry else 1):
+ result = run_func(
+ args,
+ universal_newlines=True,
+ check=False,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ env=env,
+ cwd=str(cwd) if cwd else None,
+ )
+ if result.returncode == 0:
+ break
+ _logger.debug("Environment: %s", env)
+ if retry:
+ _logger.warning(
+ "Retrying execution failure %s of: %s",
+ result.returncode,
+ " ".join(args),
+ )
+ return result
+
    @property
    def version(self) -> Version:
        """Return current Version object for Ansible.

        The value is detected once by running ``ansible --version`` and then
        cached on the instance for subsequent calls.

        :raises MissingAnsibleError: when the ansible executable cannot be
            run or does not report a version.
        """
        # Cached after first successful detection.
        if self._version:
            return self._version

        proc = self.run(["ansible", "--version"])
        if proc.returncode == 0:
            self._version = parse_ansible_version(proc.stdout)
            return self._version

        msg = "Unable to find a working copy of ansible executable."
        raise MissingAnsibleError(msg, proc=proc)
+
+ def version_in_range(
+ self,
+ lower: str | None = None,
+ upper: str | None = None,
+ ) -> bool:
+ """Check if Ansible version is inside a required range.
+
+ The lower limit is inclusive and the upper one exclusive.
+ """
+ if lower and self.version < Version(lower):
+ return False
+ if upper and self.version >= Version(upper):
+ return False
+ return True
+
+ def install_collection(
+ self,
+ collection: str | Path,
+ *,
+ destination: Path | None = None,
+ force: bool = False,
+ ) -> None:
+ """Install an Ansible collection.
+
+ Can accept arguments like:
+ 'foo.bar:>=1.2.3'
+ 'git+https://github.com/ansible-collections/ansible.posix.git,main'
+ """
+ cmd = [
+ "ansible-galaxy",
+ "collection",
+ "install",
+ "-vvv", # this is needed to make ansible display important info in case of failures
+ ]
+ if force:
+ cmd.append("--force")
+
+ if isinstance(collection, Path):
+ collection = str(collection)
+ # As ansible-galaxy install is not able to automatically determine
+ # if the range requires a pre-release, we need to manually add the --pre
+ # flag when needed.
+ matches = version_re.search(collection)
+
+ if (
+ not is_url(collection)
+ and matches
+ and CollectionVersion(matches[1]).is_prerelease
+ ):
+ cmd.append("--pre")
+
+ cpaths: list[str] = self.config.collections_paths
+ if destination and str(destination) not in cpaths:
+ # we cannot use '-p' because it breaks galaxy ability to ignore already installed collections, so
+ # we hack ansible_collections_path instead and inject our own path there.
+ # pylint: disable=no-member
+ cpaths.insert(0, str(destination))
+ cmd.append(f"{collection}")
+
+ _logger.info("Running from %s : %s", Path.cwd(), " ".join(cmd))
+ process = self.run(
+ cmd,
+ retry=True,
+ env={**self.environ, ansible_collections_path(): ":".join(cpaths)},
+ )
+ if process.returncode != 0:
+ msg = f"Command returned {process.returncode} code:\n{process.stdout}\n{process.stderr}"
+ _logger.error(msg)
+ raise InvalidPrerequisiteError(msg)
+
    def install_collection_from_disk(
        self,
        path: Path,
        destination: Path | None = None,
    ) -> None:
        """Build and install collection from a given disk path.

        :param path: root of the collection source tree
        :param destination: optional collections directory to install into
        """
        # force=True reinstalls even when the same version is already present.
        self.install_collection(path, destination=destination, force=True)
+
+ # pylint: disable=too-many-branches
+ def install_requirements( # noqa: C901
+ self,
+ requirement: Path,
+ *,
+ retry: bool = False,
+ offline: bool = False,
+ ) -> None:
+ """Install dependencies from a requirements.yml.
+
+ :param requirement: path to requirements.yml file
+ :param retry: retry network operations on failures
+ :param offline: bypass installation, may fail if requirements are not met.
+ """
+ if not Path(requirement).exists():
+ return
+ reqs_yaml = yaml_from_file(Path(requirement))
+ if not isinstance(reqs_yaml, (dict, list)):
+ msg = f"{requirement} file is not a valid Ansible requirements file."
+ raise InvalidPrerequisiteError(msg)
+
+ if isinstance(reqs_yaml, dict):
+ for key in reqs_yaml:
+ if key not in ("roles", "collections"):
+ msg = f"{requirement} file is not a valid Ansible requirements file. Only 'roles' and 'collections' keys are allowed at root level. Recognized valid locations are: {', '.join(REQUIREMENT_LOCATIONS)}"
+ raise InvalidPrerequisiteError(msg)
+
+ if isinstance(reqs_yaml, list) or "roles" in reqs_yaml:
+ cmd = [
+ "ansible-galaxy",
+ "role",
+ "install",
+ "-r",
+ f"{requirement}",
+ ]
+ if self.verbosity > 0:
+ cmd.extend(["-" + ("v" * self.verbosity)])
+ if self.cache_dir:
+ cmd.extend(["--roles-path", f"{self.cache_dir}/roles"])
+
+ if offline:
+ _logger.warning(
+ "Skipped installing old role dependencies due to running in offline mode.",
+ )
+ else:
+ _logger.info("Running %s", " ".join(cmd))
+
+ result = self.run(cmd, retry=retry)
+ _logger.debug(result.stdout)
+ if result.returncode != 0:
+ _logger.error(result.stderr)
+ raise AnsibleCommandError(result)
+
+ # Run galaxy collection install works on v2 requirements.yml
+ if "collections" in reqs_yaml and reqs_yaml["collections"] is not None:
+ cmd = [
+ "ansible-galaxy",
+ "collection",
+ "install",
+ ]
+ if self.verbosity > 0:
+ cmd.extend(["-" + ("v" * self.verbosity)])
+
+ for collection in reqs_yaml["collections"]:
+ if isinstance(collection, dict) and collection.get("type", "") == "git":
+ _logger.info(
+ "Adding '--pre' to ansible-galaxy collection install because we detected one collection being sourced from git.",
+ )
+ cmd.append("--pre")
+ break
+ if offline:
+ _logger.warning(
+ "Skipped installing collection dependencies due to running in offline mode.",
+ )
+ else:
+ cmd.extend(["-r", str(requirement)])
+ cpaths = self.config.collections_paths
+ if self.cache_dir:
+ # we cannot use '-p' because it breaks galaxy ability to ignore already installed collections, so
+ # we hack ansible_collections_path instead and inject our own path there.
+ dest_path = f"{self.cache_dir}/collections"
+ if dest_path not in cpaths:
+ # pylint: disable=no-member
+ cpaths.insert(0, dest_path)
+ _logger.info("Running %s", " ".join(cmd))
+ result = self.run(
+ cmd,
+ retry=retry,
+ env={**os.environ, "ANSIBLE_COLLECTIONS_PATH": ":".join(cpaths)},
+ )
+ _logger.debug(result.stdout)
+ if result.returncode != 0:
+ _logger.error(result.stderr)
+ raise AnsibleCommandError(result)
+
+ def prepare_environment( # noqa: C901
+ self,
+ required_collections: dict[str, str] | None = None,
+ *,
+ retry: bool = False,
+ install_local: bool = False,
+ offline: bool = False,
+ role_name_check: int = 0,
+ ) -> None:
+ """Make dependencies available if needed."""
+ destination: Path | None = None
+ if required_collections is None:
+ required_collections = {}
+
+ # first one is standard for collection layout repos and the last two
+ # are part of Tower specification
+ # https://docs.ansible.com/ansible-tower/latest/html/userguide/projects.html#ansible-galaxy-support
+ # https://docs.ansible.com/ansible-tower/latest/html/userguide/projects.html#collections-support
+ for req_file in REQUIREMENT_LOCATIONS:
+ self.install_requirements(Path(req_file), retry=retry, offline=offline)
+
+ self._prepare_ansible_paths()
+
+ if not install_local:
+ return
+
+ for gpath in search_galaxy_paths(self.project_dir):
+ # processing all found galaxy.yml files
+ galaxy_path = Path(gpath)
+ if galaxy_path.exists():
+ data = yaml_from_file(galaxy_path)
+ if isinstance(data, dict) and "dependencies" in data:
+ for name, required_version in data["dependencies"].items():
+ _logger.info(
+ "Provisioning collection %s:%s from galaxy.yml",
+ name,
+ required_version,
+ )
+ self.install_collection(
+ f"{name}{',' if is_url(name) else ':'}{required_version}",
+ destination=destination,
+ )
+
+ if self.cache_dir:
+ destination = self.cache_dir / "collections"
+ for name, min_version in required_collections.items():
+ self.install_collection(
+ f"{name}:>={min_version}",
+ destination=destination,
+ )
+
+ if (self.project_dir / "galaxy.yml").exists():
+ if destination:
+ # while function can return None, that would not break the logic
+ colpath = Path(
+ f"{destination}/ansible_collections/{colpath_from_path(self.project_dir)}",
+ )
+ if colpath.is_symlink():
+ if os.path.realpath(colpath) == str(Path.cwd()):
+ _logger.warning(
+ "Found symlinked collection, skipping its installation.",
+ )
+ return
+ _logger.warning(
+ "Collection is symlinked, but not pointing to %s directory, so we will remove it.",
+ Path.cwd(),
+ )
+ colpath.unlink()
+
+ # molecule scenario within a collection
+ self.install_collection_from_disk(
+ galaxy_path.parent,
+ destination=destination,
+ )
+ elif (
+ Path().resolve().parent.name == "roles"
+ and Path("../../galaxy.yml").exists()
+ ):
+ # molecule scenario located within roles/<role-name>/molecule inside
+ # a collection
+ self.install_collection_from_disk(
+ Path("../.."),
+ destination=destination,
+ )
+ else:
+ # no collection, try to recognize and install a standalone role
+ self._install_galaxy_role(
+ self.project_dir,
+ role_name_check=role_name_check,
+ ignore_errors=True,
+ )
+ # reload collections
+ self.load_collections()
+
+ def require_collection(
+ self,
+ name: str,
+ version: str | None = None,
+ *,
+ install: bool = True,
+ ) -> tuple[CollectionVersion, Path]:
+ """Check if a minimal collection version is present or exits.
+
+ In the future this method may attempt to install a missing or outdated
+ collection before failing.
+
+ :param name: collection name
+ :param version: minimal version required
+ :param install: if True, attempt to install a missing collection
+ :returns: tuple of (found_version, collection_path)
+ """
+ try:
+ ns, coll = name.split(".", 1)
+ except ValueError as exc:
+ msg = f"Invalid collection name supplied: {name}%s"
+ raise InvalidPrerequisiteError(
+ msg,
+ ) from exc
+
+ paths: list[str] = self.config.collections_paths
+ if not paths or not isinstance(paths, list):
+ msg = f"Unable to determine ansible collection paths. ({paths})"
+ raise InvalidPrerequisiteError(
+ msg,
+ )
+
+ if self.cache_dir:
+ # if we have a cache dir, we want to be use that would be preferred
+ # destination when installing a missing collection
+ # https://github.com/PyCQA/pylint/issues/4667
+ paths.insert(0, f"{self.cache_dir}/collections") # pylint: disable=E1101
+
+ for path in paths:
+ collpath = Path(path) / "ansible_collections" / ns / coll
+ if collpath.exists():
+ mpath = collpath / "MANIFEST.json"
+ if not mpath.exists():
+ msg = f"Found collection at '{collpath}' but missing MANIFEST.json, cannot get info."
+ _logger.fatal(msg)
+ raise InvalidPrerequisiteError(msg)
+
+ with mpath.open(encoding="utf-8") as f:
+ manifest = json.loads(f.read())
+ found_version = CollectionVersion(
+ manifest["collection_info"]["version"],
+ )
+ if version and found_version < CollectionVersion(version):
+ if install:
+ self.install_collection(f"{name}:>={version}")
+ self.require_collection(name, version, install=False)
+ else:
+ msg = f"Found {name} collection {found_version} but {version} or newer is required."
+ _logger.fatal(msg)
+ raise InvalidPrerequisiteError(msg)
+ return found_version, collpath.resolve()
+ break
+ else:
+ if install:
+ self.install_collection(f"{name}:>={version}" if version else name)
+ return self.require_collection(
+ name=name,
+ version=version,
+ install=False,
+ )
+ msg = f"Collection '{name}' not found in '{paths}'"
+ _logger.fatal(msg)
+ raise InvalidPrerequisiteError(msg)
+
+ def _prepare_ansible_paths(self) -> None:
+ """Configure Ansible environment variables."""
+ try:
+ library_paths: list[str] = self.config.default_module_path.copy()
+ roles_path: list[str] = self.config.default_roles_path.copy()
+ collections_path: list[str] = self.config.collections_paths.copy()
+ except AttributeError as exc:
+ msg = "Unexpected ansible configuration"
+ raise RuntimeError(msg) from exc
+
+ alterations_list: list[tuple[list[str], str, bool]] = [
+ (library_paths, "plugins/modules", True),
+ (roles_path, "roles", True),
+ ]
+
+ alterations_list.extend(
+ [
+ (roles_path, f"{self.cache_dir}/roles", False),
+ (library_paths, f"{self.cache_dir}/modules", False),
+ (collections_path, f"{self.cache_dir}/collections", False),
+ ]
+ if self.isolated
+ else [],
+ )
+
+ for path_list, path_, must_be_present in alterations_list:
+ path = Path(path_)
+ if not path.exists():
+ if must_be_present:
+ continue
+ path.mkdir(parents=True, exist_ok=True)
+ if str(path) not in path_list:
+ path_list.insert(0, str(path))
+
+ if library_paths != self.config.DEFAULT_MODULE_PATH:
+ self._update_env("ANSIBLE_LIBRARY", library_paths)
+ if collections_path != self.config.default_collections_path:
+ self._update_env(ansible_collections_path(), collections_path)
+ if roles_path != self.config.default_roles_path:
+ self._update_env("ANSIBLE_ROLES_PATH", roles_path)
+
+ def _get_roles_path(self) -> Path:
+ """Return roles installation path.
+
+ If `self.isolated` is set to `True`, `self.cache_dir` would be
+ created, then it returns the `self.cache_dir/roles`. When `self.isolated` is
+ not mentioned or set to `False`, it returns the first path in
+ `default_roles_path`.
+ """
+ if self.cache_dir:
+ path = Path(f"{self.cache_dir}/roles")
+ else:
+ path = Path(self.config.default_roles_path[0]).expanduser()
+ return path
+
+ def _install_galaxy_role(
+ self,
+ project_dir: Path,
+ role_name_check: int = 0,
+ *,
+ ignore_errors: bool = False,
+ ) -> None:
+ """Detect standalone galaxy role and installs it.
+
+ :param: role_name_check: logic to used to check role name
+ 0: exit with error if name is not compliant (default)
+ 1: warn if name is not compliant
+ 2: bypass any name checking
+
+ :param: ignore_errors: if True, bypass installing invalid roles.
+
+ Our implementation aims to match ansible-galaxy's behaviour for installing
+ roles from a tarball or scm. For example ansible-galaxy will install a role
+ that has both galaxy.yml and meta/main.yml present but empty. Also missing
+ galaxy.yml is accepted but missing meta/main.yml is not.
+ """
+ yaml = None
+ galaxy_info = {}
+
+ for meta_main in META_MAIN:
+ meta_filename = Path(project_dir) / meta_main
+
+ if meta_filename.exists():
+ break
+ else:
+ if ignore_errors:
+ return
+
+ yaml = yaml_from_file(meta_filename)
+
+ if yaml and "galaxy_info" in yaml:
+ galaxy_info = yaml["galaxy_info"]
+
+ fqrn = _get_role_fqrn(galaxy_info, project_dir)
+
+ if role_name_check in [0, 1]:
+ if not re.match(r"[a-z0-9][a-z0-9_]+\.[a-z][a-z0-9_]+$", fqrn):
+ msg = MSG_INVALID_FQRL.format(fqrn)
+ if role_name_check == 1:
+ _logger.warning(msg)
+ else:
+ _logger.error(msg)
+ raise InvalidPrerequisiteError(msg)
+ elif "role_name" in galaxy_info:
+ # when 'role-name' is in skip_list, we stick to plain role names
+ role_namespace = _get_galaxy_role_ns(galaxy_info)
+ role_name = _get_galaxy_role_name(galaxy_info)
+ fqrn = f"{role_namespace}{role_name}"
+ else:
+ fqrn = Path(project_dir).absolute().name
+ path = self._get_roles_path()
+ path.mkdir(parents=True, exist_ok=True)
+ link_path = path / fqrn
+ # despite documentation stating that is_file() reports true for symlinks,
+ # it appears that is_dir() reports true instead, so we rely on exists().
+ target = Path(project_dir).absolute()
+ if not link_path.exists() or (
+ link_path.is_symlink() and link_path.readlink() != target
+ ):
+ # must call unlink before checking exists because a broken
+ # link reports as not existing and we want to repair it
+ link_path.unlink(missing_ok=True)
+ # https://github.com/python/cpython/issues/73843
+ link_path.symlink_to(str(target), target_is_directory=True)
+ _logger.info(
+ "Using %s symlink to current repository in order to enable Ansible to find the role using its expected full name.",
+ link_path,
+ )
+
+ def _update_env(self, varname: str, value: list[str], default: str = "") -> None:
+ """Update colon based environment variable if needed.
+
+ New values are prepended to make sure they take precedence.
+ """
+ if not value:
+ return
+ orig_value = self.environ.get(varname, default)
+ if orig_value:
+ value = [*value, *orig_value.split(":")]
+ value_str = ":".join(value)
+ if value_str != self.environ.get(varname, ""):
+ self.environ[varname] = value_str
+ _logger.info("Set %s=%s", varname, value_str)
+
+
def _get_role_fqrn(galaxy_infos: dict[str, Any], project_dir: Path) -> str:
    """Compute role fqrn.

    :param galaxy_infos: content of the galaxy_info key from meta/main.yml
    :param project_dir: role directory, used as name fallback
    :returns: "<namespace.>name" string
    """
    role_namespace = _get_galaxy_role_ns(galaxy_infos)
    role_name = _get_galaxy_role_name(galaxy_infos)

    if len(role_name) == 0:
        # Fall back to the project directory name, stripping repository
        # prefixes and keeping only the last dotted component.
        role_name = Path(project_dir).absolute().name
        # Fix: the longer alternative must come first in the alternation;
        # with "ansible-" first, "ansible-role-x" reduced to "role-x"
        # because regex alternation picks the first (not longest) match.
        role_name = re.sub(r"(ansible-role-|ansible-)", "", role_name).split(
            ".",
            maxsplit=2,
        )[-1]

    return f"{role_namespace}{role_name}"
+
+
+def _get_galaxy_role_ns(galaxy_infos: dict[str, Any]) -> str:
+ """Compute role namespace from meta/main.yml, including trailing dot."""
+ role_namespace = galaxy_infos.get("namespace", "")
+ if len(role_namespace) == 0:
+ role_namespace = galaxy_infos.get("author", "")
+ if not isinstance(role_namespace, str):
+ msg = f"Role namespace must be string, not {role_namespace}"
+ raise AnsibleCompatError(msg)
+ # if there's a space in the name space, it's likely author name
+ # and not the galaxy login, so act as if there was no namespace
+ if not role_namespace or re.match(r"^\w+ \w+", role_namespace):
+ role_namespace = ""
+ else:
+ role_namespace = f"{role_namespace}."
+ return role_namespace
+
+
+def _get_galaxy_role_name(galaxy_infos: dict[str, Any]) -> str:
+ """Compute role name from meta/main.yml."""
+ result = galaxy_infos.get("role_name", "")
+ if not isinstance(result, str):
+ return ""
+ return result
+
+
def search_galaxy_paths(search_dir: Path) -> list[str]:
    """Search for galaxy paths (only one level deep)."""
    found: list[str] = []
    # "." covers search_dir itself; each listed entry is a candidate
    # one level down.
    for entry in [".", *os.listdir(search_dir)]:
        # We ignore any folders that are not valid namespaces, just like
        # ansible galaxy does at this moment.
        if entry == "." or namespace_re.match(entry):
            candidate = search_dir / entry / "galaxy.yml"
            if candidate.is_file():
                found.append(str(candidate))
    return found
+
+
def is_url(name: str) -> bool:
    """Return True if a dependency name looks like an URL.

    Recognizes both "git+<scheme>://..." and "git@host:..." forms.
    """
    return re.match("^git[+@]", name) is not None
diff --git a/src/ansible_compat/schema.py b/src/ansible_compat/schema.py
new file mode 100644
index 0000000..2950e08
--- /dev/null
+++ b/src/ansible_compat/schema.py
@@ -0,0 +1,110 @@
+"""Utils for JSON Schema validation."""
+from __future__ import annotations
+
+import json
+from collections.abc import Mapping, Sequence
+from dataclasses import dataclass
+from typing import TYPE_CHECKING
+
+import jsonschema
+from jsonschema.validators import validator_for
+
+if TYPE_CHECKING:
+ from ansible_compat.types import JSON
+
+
+def to_path(schema_path: Sequence[str | int]) -> str:
+ """Flatten a path to a dot delimited string.
+
+ :param schema_path: The schema path
+ :returns: The dot delimited path
+ """
+ return ".".join(str(index) for index in schema_path)
+
+
+def json_path(absolute_path: Sequence[str | int]) -> str:
+ """Flatten a data path to a dot delimited string.
+
+ :param absolute_path: The path
+ :returns: The dot delimited string
+ """
+ path = "$"
+ for elem in absolute_path:
+ if isinstance(elem, int):
+ path += "[" + str(elem) + "]"
+ else:
+ path += "." + elem
+ return path
+
+
+@dataclass(order=True)
+class JsonSchemaError:
+ # pylint: disable=too-many-instance-attributes
+ """Data structure to hold a json schema validation error."""
+
+ # order of attributes below is important for sorting
+ schema_path: str
+ data_path: str
+ json_path: str
+ message: str
+ expected: bool | int | str
+ relative_schema: str
+ validator: str
+ found: str
+
+ def to_friendly(self) -> str:
+ """Provide a friendly explanation of the error.
+
+ :returns: The error message
+ """
+ return f"In '{self.data_path}': {self.message}."
+
+
+def validate(
+ schema: JSON,
+ data: JSON,
+) -> list[JsonSchemaError]:
+ """Validate some data against a JSON schema.
+
+ :param schema: the JSON schema to use for validation
+ :param data: The data to validate
+ :returns: Any errors encountered
+ """
+ errors: list[JsonSchemaError] = []
+
+ if isinstance(schema, str):
+ schema = json.loads(schema)
+ try:
+ if not isinstance(schema, Mapping):
+ msg = "Invalid schema, must be a mapping"
+ raise jsonschema.SchemaError(msg) # noqa: TRY301
+ validator = validator_for(schema)
+ validator.check_schema(schema)
+ except jsonschema.SchemaError as exc:
+ error = JsonSchemaError(
+ message=str(exc),
+ data_path="schema sanity check",
+ json_path="",
+ schema_path="",
+ relative_schema="",
+ expected="",
+ validator="",
+ found="",
+ )
+ errors.append(error)
+ return errors
+
+ for validation_error in validator(schema).iter_errors(data):
+ if isinstance(validation_error, jsonschema.ValidationError):
+ error = JsonSchemaError(
+ message=validation_error.message,
+ data_path=to_path(validation_error.absolute_path),
+ json_path=json_path(validation_error.absolute_path),
+ schema_path=to_path(validation_error.schema_path),
+ relative_schema=validation_error.schema,
+ expected=validation_error.validator_value,
+ validator=str(validation_error.validator),
+ found=str(validation_error.instance),
+ )
+ errors.append(error)
+ return sorted(errors)
diff --git a/src/ansible_compat/types.py b/src/ansible_compat/types.py
new file mode 100644
index 0000000..4514606
--- /dev/null
+++ b/src/ansible_compat/types.py
@@ -0,0 +1,23 @@
+"""Custom types."""
+from __future__ import annotations
+
+from collections.abc import Mapping, Sequence
+from typing import Union
+
+try: # py39 does not have TypeAlias
+ from typing_extensions import TypeAlias
+except ImportError:
+ from typing import TypeAlias # type: ignore[no-redef,attr-defined]
+
+JSON: TypeAlias = Union[dict[str, "JSON"], list["JSON"], str, int, float, bool, None]
+JSON_ro: TypeAlias = Union[
+ Mapping[str, "JSON_ro"],
+ Sequence["JSON_ro"],
+ str,
+ int,
+ float,
+ bool,
+ None,
+]
+
+__all__ = ["JSON", "JSON_ro"]
diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 0000000..689eb7b
--- /dev/null
+++ b/test/__init__.py
@@ -0,0 +1 @@
+"""Tests for ansible_compat package."""
diff --git a/test/assets/galaxy_paths/.bar/galaxy.yml b/test/assets/galaxy_paths/.bar/galaxy.yml
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/assets/galaxy_paths/.bar/galaxy.yml
diff --git a/test/assets/galaxy_paths/foo/galaxy.yml b/test/assets/galaxy_paths/foo/galaxy.yml
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/assets/galaxy_paths/foo/galaxy.yml
diff --git a/test/assets/requirements-invalid-collection.yml b/test/assets/requirements-invalid-collection.yml
new file mode 100644
index 0000000..6ace6cf
--- /dev/null
+++ b/test/assets/requirements-invalid-collection.yml
@@ -0,0 +1,3 @@
+# "ansible-galaxy collection install" is expected to fail this invalid file
+collections:
+ - foo: bar
diff --git a/test/assets/requirements-invalid-role.yml b/test/assets/requirements-invalid-role.yml
new file mode 100644
index 0000000..e02c64e
--- /dev/null
+++ b/test/assets/requirements-invalid-role.yml
@@ -0,0 +1,3 @@
+# file expected to make "ansible-galaxy role install" fail
+roles:
+ - this_role_does_not_exist
diff --git a/test/assets/validate0_data.json b/test/assets/validate0_data.json
new file mode 100644
index 0000000..e9f6f2e
--- /dev/null
+++ b/test/assets/validate0_data.json
@@ -0,0 +1 @@
+{ "environment": { "a": false, "b": true, "c": "foo" } }
diff --git a/test/assets/validate0_expected.json b/test/assets/validate0_expected.json
new file mode 100644
index 0000000..ea36da9
--- /dev/null
+++ b/test/assets/validate0_expected.json
@@ -0,0 +1,22 @@
+[
+ {
+ "message": "False is not of type 'string'",
+ "data_path": "environment.a",
+ "json_path": "$.environment.a",
+ "schema_path": "properties.environment.additionalProperties.type",
+ "relative_schema": { "type": "string" },
+ "expected": "string",
+ "validator": "type",
+ "found": "False"
+ },
+ {
+ "message": "True is not of type 'string'",
+ "data_path": "environment.b",
+ "json_path": "$.environment.b",
+ "schema_path": "properties.environment.additionalProperties.type",
+ "relative_schema": { "type": "string" },
+ "expected": "string",
+ "validator": "type",
+ "found": "True"
+ }
+]
diff --git a/test/assets/validate0_schema.json b/test/assets/validate0_schema.json
new file mode 100644
index 0000000..e642fb0
--- /dev/null
+++ b/test/assets/validate0_schema.json
@@ -0,0 +1,9 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "properties": {
+ "environment": {
+ "type": "object",
+ "additionalProperties": { "type": "string" }
+ }
+ }
+}
diff --git a/test/collections/acme.broken/galaxy.yml b/test/collections/acme.broken/galaxy.yml
new file mode 100644
index 0000000..599fd5b
--- /dev/null
+++ b/test/collections/acme.broken/galaxy.yml
@@ -0,0 +1 @@
+foo: that is not a valid collection!
diff --git a/test/collections/acme.goodies/galaxy.yml b/test/collections/acme.goodies/galaxy.yml
new file mode 100644
index 0000000..9682115
--- /dev/null
+++ b/test/collections/acme.goodies/galaxy.yml
@@ -0,0 +1,34 @@
+name: goodies
+namespace: acme
+version: 1.0.0
+readme: README.md
+authors:
+ - Red Hat
+description: Sample collection to use with molecule
+dependencies:
+ community.molecule: ">=0.1.0" # used to also test '=>' condition
+ ansible.utils: "*" # used to also test '*'
+ git+https://github.com/ansible-collections/community.crypto.git: main # tests ability to install from git
+build_ignore:
+ - "*.egg-info"
+ - .DS_Store
+ - .eggs
+ - .gitignore
+ - .mypy_cache
+ - .pytest_cache
+ - .stestr
+ - .stestr.conf
+ - .tox
+ - .vscode
+ - MANIFEST.in
+ - build
+ - dist
+ - doc
+ - report.html
+ - setup.cfg
+ - setup.py
+ - "tests/unit/*.*"
+ - README.rst
+ - tox.ini
+
+license_file: LICENSE
diff --git a/test/collections/acme.goodies/molecule/default/converge.yml b/test/collections/acme.goodies/molecule/default/converge.yml
new file mode 100644
index 0000000..b85e064
--- /dev/null
+++ b/test/collections/acme.goodies/molecule/default/converge.yml
@@ -0,0 +1,7 @@
+---
+- name: Converge
+ hosts: localhost
+ tasks:
+ - name: "Include sample role from current collection"
+ include_role:
+ name: acme.goodies.baz
diff --git a/test/collections/acme.goodies/molecule/default/molecule.yml b/test/collections/acme.goodies/molecule/default/molecule.yml
new file mode 100644
index 0000000..74c8557
--- /dev/null
+++ b/test/collections/acme.goodies/molecule/default/molecule.yml
@@ -0,0 +1,11 @@
+---
+dependency:
+ name: galaxy
+driver:
+ name: delegated
+platforms:
+ - name: instance
+provisioner:
+ name: ansible
+verifier:
+ name: ansible
diff --git a/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/converge.yml b/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/converge.yml
new file mode 100644
index 0000000..c18086f
--- /dev/null
+++ b/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/converge.yml
@@ -0,0 +1,7 @@
+---
+- name: Converge
+ hosts: localhost
+ tasks:
+ - name: "Sample testing task part of deep_scenario"
+ include_role:
+ name: acme.goodies.baz
diff --git a/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/molecule.yml b/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/molecule.yml
new file mode 100644
index 0000000..74c8557
--- /dev/null
+++ b/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/molecule.yml
@@ -0,0 +1,11 @@
+---
+dependency:
+ name: galaxy
+driver:
+ name: delegated
+platforms:
+ - name: instance
+provisioner:
+ name: ansible
+verifier:
+ name: ansible
diff --git a/test/collections/acme.goodies/roles/baz/tasks/main.yml b/test/collections/acme.goodies/roles/baz/tasks/main.yml
new file mode 100644
index 0000000..f5fc693
--- /dev/null
+++ b/test/collections/acme.goodies/roles/baz/tasks/main.yml
@@ -0,0 +1,3 @@
+- name: "some task inside foo.bar collection"
+ debug:
+ msg: "hello world!"
diff --git a/test/collections/acme.goodies/tests/requirements.yml b/test/collections/acme.goodies/tests/requirements.yml
new file mode 100644
index 0000000..b004fa9
--- /dev/null
+++ b/test/collections/acme.goodies/tests/requirements.yml
@@ -0,0 +1,3 @@
+collections:
+ - name: ansible.posix
+ version: ">=1.0"
diff --git a/test/collections/acme.minimal/galaxy.yml b/test/collections/acme.minimal/galaxy.yml
new file mode 100644
index 0000000..a15e418
--- /dev/null
+++ b/test/collections/acme.minimal/galaxy.yml
@@ -0,0 +1,30 @@
+name: minimal
+namespace: acme
+version: 1.0.0
+readme: README.md
+authors:
+ - Red Hat
+description: Sample collection to use with molecule
+build_ignore:
+ - "*.egg-info"
+ - .DS_Store
+ - .eggs
+ - .gitignore
+ - .mypy_cache
+ - .pytest_cache
+ - .stestr
+ - .stestr.conf
+ - .tox
+ - .vscode
+ - MANIFEST.in
+ - build
+ - dist
+ - doc
+ - report.html
+ - setup.cfg
+ - setup.py
+ - "tests/unit/*.*"
+ - README.rst
+ - tox.ini
+
+license_file: LICENSE
diff --git a/test/conftest.py b/test/conftest.py
new file mode 100644
index 0000000..a1e4893
--- /dev/null
+++ b/test/conftest.py
@@ -0,0 +1,127 @@
+"""Pytest fixtures."""
+import importlib.metadata
+import json
+import pathlib
+import subprocess
+import sys
+from collections.abc import Generator
+from pathlib import Path
+from typing import Callable
+
+import pytest
+
+from ansible_compat.runtime import Runtime
+
+
+@pytest.fixture()
+# pylint: disable=unused-argument
+def runtime(scope: str = "session") -> Generator[Runtime, None, None]: # noqa: ARG001
+ """Isolated runtime fixture."""
+ instance = Runtime(isolated=True)
+ yield instance
+ instance.clean()
+
+
+@pytest.fixture()
+# pylint: disable=unused-argument
+def runtime_tmp(
+ tmp_path: pathlib.Path,
+ scope: str = "session", # noqa: ARG001
+) -> Generator[Runtime, None, None]:
+ """Isolated runtime fixture using a temp directory."""
+ instance = Runtime(project_dir=tmp_path, isolated=True)
+ yield instance
+ instance.clean()
+
+
+def query_pkg_version(pkg: str) -> str:
+ """Get the version of a current installed package.
+
+ :param pkg: Package name
+ :return: Package version
+ """
+ return importlib.metadata.version(pkg)
+
+
+@pytest.fixture()
+def pkg_version() -> Callable[[str], str]:
+ """Get the version of a current installed package.
+
+ :return: Callable function to get package version
+ """
+ return query_pkg_version
+
+
+class VirtualEnvironment:
+ """Virtualenv wrapper."""
+
+ def __init__(self, path: Path) -> None:
+ """Initialize.
+
+ :param path: Path to virtualenv
+ """
+ self.project = path
+ self.venv_path = self.project / "venv"
+ self.venv_bin_path = self.venv_path / "bin"
+ self.venv_python_path = self.venv_bin_path / "python"
+
+ def create(self) -> None:
+ """Create virtualenv."""
+ cmd = [str(sys.executable), "-m", "venv", str(self.venv_path)]
+ subprocess.check_call(args=cmd)
+ # Install this package into the virtual environment
+ self.install(f"{__file__}/../..")
+
+ def install(self, *packages: str) -> None:
+ """Install packages in virtualenv.
+
+ :param packages: Packages to install
+ """
+ cmd = [str(self.venv_python_path), "-m", "pip", "install", *packages]
+ subprocess.check_call(args=cmd)
+
+ def python_script_run(self, script: str) -> subprocess.CompletedProcess[str]:
+ """Run command in project dir using venv.
+
+        :param script: Python script source to run
+ """
+ proc = subprocess.run(
+ args=[self.venv_python_path, "-c", script],
+ capture_output=True,
+ cwd=self.project,
+ check=False,
+ text=True,
+ )
+ return proc
+
+ def site_package_dirs(self) -> list[Path]:
+ """Get site packages.
+
+ :return: List of site packages dirs
+ """
+ script = "import json, site; print(json.dumps(site.getsitepackages()))"
+ proc = subprocess.run(
+ args=[self.venv_python_path, "-c", script],
+ capture_output=True,
+ check=False,
+ text=True,
+ )
+ dirs = json.loads(proc.stdout)
+ if not isinstance(dirs, list):
+ msg = "Expected list of site packages"
+ raise TypeError(msg)
+ sanitized = list({Path(d).resolve() for d in dirs})
+ return sanitized
+
+
+@pytest.fixture(scope="module")
+def venv_module(tmp_path_factory: pytest.TempPathFactory) -> VirtualEnvironment:
+ """Create a virtualenv in a temporary directory.
+
+    :param tmp_path_factory: pytest fixture for temp path factory
+ :return: VirtualEnvironment instance
+ """
+ test_project = tmp_path_factory.mktemp(basename="test_project-", numbered=True)
+ _venv = VirtualEnvironment(test_project)
+ _venv.create()
+ return _venv
diff --git a/test/roles/acme.missing_deps/meta/main.yml b/test/roles/acme.missing_deps/meta/main.yml
new file mode 100644
index 0000000..69b0417
--- /dev/null
+++ b/test/roles/acme.missing_deps/meta/main.yml
@@ -0,0 +1,8 @@
+---
+galaxy_info:
+ name: missing_deps
+ namespace: acme
+ description: foo
+ license: GPL
+ min_ansible_version: "2.10"
+ platforms: []
diff --git a/test/roles/acme.missing_deps/requirements.yml b/test/roles/acme.missing_deps/requirements.yml
new file mode 100644
index 0000000..53c5937
--- /dev/null
+++ b/test/roles/acme.missing_deps/requirements.yml
@@ -0,0 +1,2 @@
+collections:
+ - foo.bar # collection that does not exist, so we can test offline mode
diff --git a/test/roles/acme.sample2/meta/main.yml b/test/roles/acme.sample2/meta/main.yml
new file mode 100644
index 0000000..b682a84
--- /dev/null
+++ b/test/roles/acme.sample2/meta/main.yml
@@ -0,0 +1,16 @@
+---
+dependencies: []
+
+galaxy_info:
+ # role_name is missing in order to test deduction from folder name
+ author: acme
+ description: ACME sample role
+ company: "ACME LTD"
+ license: MIT
+ min_ansible_version: "2.9"
+ platforms:
+ - name: Debian
+ versions:
+ - any
+ galaxy_tags:
+ - samples
diff --git a/test/roles/ansible-role-sample/meta/main.yml b/test/roles/ansible-role-sample/meta/main.yml
new file mode 100644
index 0000000..bfddeb7
--- /dev/null
+++ b/test/roles/ansible-role-sample/meta/main.yml
@@ -0,0 +1,16 @@
+---
+dependencies: []
+
+galaxy_info:
+ role_name: sample
+ author: acme
+ description: ACME sample role
+ company: "ACME LTD"
+ license: MIT
+ min_ansible_version: "2.9"
+ platforms:
+ - name: Debian
+ versions:
+ - any
+ galaxy_tags:
+ - samples
diff --git a/test/roles/sample3/meta/main.yml b/test/roles/sample3/meta/main.yml
new file mode 100644
index 0000000..f479788
--- /dev/null
+++ b/test/roles/sample3/meta/main.yml
@@ -0,0 +1,16 @@
+---
+dependencies: []
+
+galaxy_info:
+ # role_name is missing in order to test deduction from folder name
+ author: acme
+  description: ACME sample role
+ company: "ACME LTD"
+ license: MIT
+ min_ansible_version: "2.9"
+ platforms:
+ - name: Debian
+ versions:
+ - any
+ galaxy_tags:
+ - samples
diff --git a/test/roles/sample4/meta/main.yml b/test/roles/sample4/meta/main.yml
new file mode 100644
index 0000000..f479788
--- /dev/null
+++ b/test/roles/sample4/meta/main.yml
@@ -0,0 +1,16 @@
+---
+dependencies: []
+
+galaxy_info:
+ # role_name is missing in order to test deduction from folder name
+ author: acme
+  description: ACME sample role
+ company: "ACME LTD"
+ license: MIT
+ min_ansible_version: "2.9"
+ platforms:
+ - name: Debian
+ versions:
+ - any
+ galaxy_tags:
+ - samples
diff --git a/test/test_api.py b/test/test_api.py
new file mode 100644
index 0000000..80b38ba
--- /dev/null
+++ b/test/test_api.py
@@ -0,0 +1,5 @@
+"""Tests for ansible_compat package."""
+
+
+def test_placeholder() -> None:
+ """Placeholder test."""
diff --git a/test/test_config.py b/test/test_config.py
new file mode 100644
index 0000000..4f854ae
--- /dev/null
+++ b/test/test_config.py
@@ -0,0 +1,86 @@
+"""Tests for ansible_compat.config submodule."""
+import copy
+import subprocess
+
+import pytest
+from _pytest.monkeypatch import MonkeyPatch
+from packaging.version import Version
+
+from ansible_compat.config import AnsibleConfig, ansible_version, parse_ansible_version
+from ansible_compat.errors import InvalidPrerequisiteError, MissingAnsibleError
+
+
+def test_config() -> None:
+ """Checks that config vars are loaded with their expected type."""
+ config = AnsibleConfig()
+ assert isinstance(config.ACTION_WARNINGS, bool)
+ assert isinstance(config.CACHE_PLUGIN_PREFIX, str)
+ assert isinstance(config.CONNECTION_FACTS_MODULES, dict)
+ assert config.ANSIBLE_COW_PATH is None
+ assert isinstance(config.NETWORK_GROUP_MODULES, list)
+ assert isinstance(config.DEFAULT_GATHER_TIMEOUT, (int, type(None)))
+
+ # check lowercase and older name aliasing
+ assert isinstance(config.collections_paths, list)
+ assert isinstance(config.collections_path, list)
+ assert config.collections_paths == config.collections_path
+
+ # check if we can access the special data member
+ assert config.data["ACTION_WARNINGS"] == config.ACTION_WARNINGS
+
+ with pytest.raises(AttributeError):
+ _ = config.THIS_DOES_NOT_EXIST
+
+
+def test_config_with_dump() -> None:
+ """Tests that config can parse given dumps."""
+ config = AnsibleConfig(config_dump="ACTION_WARNINGS(default) = True")
+ assert config.ACTION_WARNINGS is True
+
+
+def test_config_copy() -> None:
+ """Checks ability to use copy/deepcopy."""
+ config = AnsibleConfig()
+ new_config = copy.copy(config)
+ assert isinstance(new_config, AnsibleConfig)
+ assert new_config is not config
+ # deepcopy testing
+ new_config = copy.deepcopy(config)
+ assert isinstance(new_config, AnsibleConfig)
+ assert new_config is not config
+
+
+def test_parse_ansible_version_fail() -> None:
+ """Checks that parse_ansible_version raises an error on invalid input."""
+ with pytest.raises(
+ InvalidPrerequisiteError,
+ match="Unable to parse ansible cli version",
+ ):
+ parse_ansible_version("foo")
+
+
+def test_ansible_version_missing(monkeypatch: MonkeyPatch) -> None:
+ """Validate ansible_version behavior when ansible is missing."""
+ monkeypatch.setattr(
+ "subprocess.run",
+ lambda *args, **kwargs: subprocess.CompletedProcess( # noqa: ARG005
+ args=[],
+ returncode=1,
+ ),
+ )
+ with pytest.raises(
+ MissingAnsibleError,
+ match="Unable to find a working copy of ansible executable.",
+ ):
+ # bypassing lru cache
+ ansible_version.__wrapped__()
+
+
+def test_ansible_version() -> None:
+ """Validate ansible_version behavior."""
+ assert ansible_version() >= Version("1.0")
+
+
+def test_ansible_version_arg() -> None:
+ """Validate ansible_version behavior."""
+ assert ansible_version("2.0") >= Version("1.0")
diff --git a/test/test_configuration_example.py b/test/test_configuration_example.py
new file mode 100644
index 0000000..3a2c9b7
--- /dev/null
+++ b/test/test_configuration_example.py
@@ -0,0 +1,12 @@
+"""Sample usage of AnsibleConfig."""
+from ansible_compat.config import AnsibleConfig
+
+
+def test_example_config() -> None:
+ """Test basic functionality of AnsibleConfig."""
+ cfg = AnsibleConfig()
+ assert isinstance(cfg.ACTION_WARNINGS, bool)
+ # you can also use lowercase:
+ assert isinstance(cfg.action_warnings, bool)
+ # you can also use it as dictionary
+ assert cfg["action_warnings"] == cfg.action_warnings
diff --git a/test/test_loaders.py b/test/test_loaders.py
new file mode 100644
index 0000000..7a91a4c
--- /dev/null
+++ b/test/test_loaders.py
@@ -0,0 +1,9 @@
+"""Test for ansible_compat.loaders module."""
+from pathlib import Path
+
+from ansible_compat.loaders import colpath_from_path
+
+
+def test_colpath_from_path() -> None:
+ """Test colpath_from_path non existing path."""
+ assert colpath_from_path(Path("/foo/bar/")) is None
diff --git a/test/test_prerun.py b/test/test_prerun.py
new file mode 100644
index 0000000..1549756
--- /dev/null
+++ b/test/test_prerun.py
@@ -0,0 +1,11 @@
+"""Tests for ansible_compat.prerun module."""
+from pathlib import Path
+
+from ansible_compat.prerun import get_cache_dir
+
+
+def test_get_cache_dir_relative() -> None:
+ """Test behaviors of get_cache_dir."""
+ relative_path = Path()
+ abs_path = relative_path.resolve()
+ assert get_cache_dir(relative_path) == get_cache_dir(abs_path)
diff --git a/test/test_runtime.py b/test/test_runtime.py
new file mode 100644
index 0000000..2af343d
--- /dev/null
+++ b/test/test_runtime.py
@@ -0,0 +1,893 @@
+"""Tests for Runtime class."""
+# pylint: disable=protected-access
+from __future__ import annotations
+
+import logging
+import os
+import pathlib
+import subprocess
+from contextlib import contextmanager
+from pathlib import Path
+from shutil import rmtree
+from typing import TYPE_CHECKING, Any
+
+import pytest
+from packaging.version import Version
+
+from ansible_compat.config import ansible_version
+from ansible_compat.constants import INVALID_PREREQUISITES_RC
+from ansible_compat.errors import (
+ AnsibleCommandError,
+ AnsibleCompatError,
+ InvalidPrerequisiteError,
+)
+from ansible_compat.runtime import (
+ CompletedProcess,
+ Runtime,
+ _get_galaxy_role_name,
+ is_url,
+ search_galaxy_paths,
+)
+
+if TYPE_CHECKING:
+ from collections.abc import Iterator
+
+ from _pytest.monkeypatch import MonkeyPatch
+ from pytest_mock import MockerFixture
+
+
+def test_runtime_version(runtime: Runtime) -> None:
+ """Tests version property."""
+ version = runtime.version
+ assert isinstance(version, Version)
+ # tests that caching property value worked (coverage)
+ assert version == runtime.version
+
+
+@pytest.mark.parametrize(
+ "require_module",
+ (True, False),
+ ids=("module-required", "module-unrequired"),
+)
+def test_runtime_version_outdated(require_module: bool) -> None:
+ """Checks that instantiation raises if version is outdated."""
+ with pytest.raises(RuntimeError, match="Found incompatible version of ansible"):
+ Runtime(min_required_version="9999.9.9", require_module=require_module)
+
+
+def test_runtime_missing_ansible_module(monkeypatch: MonkeyPatch) -> None:
+ """Checks that we produce a RuntimeError when ansible module is missing."""
+
+ class RaiseException:
+ """Class to raise an exception."""
+
+ def __init__(
+ self,
+ *args: Any, # noqa: ARG002,ANN401
+ **kwargs: Any, # noqa: ARG002,ANN401
+ ) -> None:
+ raise ModuleNotFoundError
+
+ monkeypatch.setattr("importlib.import_module", RaiseException)
+
+ with pytest.raises(RuntimeError, match="Unable to find Ansible python module."):
+ Runtime(require_module=True)
+
+
+def test_runtime_mismatch_ansible_module(monkeypatch: MonkeyPatch) -> None:
+ """Test that missing module is detected."""
+ monkeypatch.setattr("ansible.release.__version__", "0.0.0", raising=False)
+ with pytest.raises(RuntimeError, match="versions do not match"):
+ Runtime(require_module=True)
+
+
+def test_runtime_require_module() -> None:
+ """Check that require_module successful pass."""
+ Runtime(require_module=True)
+ # Now we try to set the collection path, something to check if that is
+ # causing an exception, as 2.15 introduced new init code.
+ from ansible.utils.collection_loader import ( # pylint: disable=import-outside-toplevel
+ AnsibleCollectionConfig,
+ )
+
+ AnsibleCollectionConfig.playbook_paths = "."
+ # Calling it again in order to see that it does not produce UserWarning: AnsibleCollectionFinder has already been configured
+ # which is done by Ansible core 2.15+. We added special code inside Runtime
+ # that should avoid initializing twice and raise that warning.
+ Runtime(require_module=True)
+
+
+def test_runtime_version_fail_module(mocker: MockerFixture) -> None:
+ """Tests for failure to detect Ansible version."""
+ patched = mocker.patch(
+ "ansible_compat.runtime.parse_ansible_version",
+ autospec=True,
+ )
+ patched.side_effect = InvalidPrerequisiteError(
+ "Unable to parse ansible cli version",
+ )
+ runtime = Runtime()
+ with pytest.raises(
+ InvalidPrerequisiteError,
+ match="Unable to parse ansible cli version",
+ ):
+ _ = runtime.version # pylint: disable=pointless-statement
+
+
+def test_runtime_version_fail_cli(mocker: MockerFixture) -> None:
+ """Tests for failure to detect Ansible version."""
+ mocker.patch(
+ "ansible_compat.runtime.Runtime.run",
+ return_value=CompletedProcess(
+ ["x"],
+ returncode=123,
+ stdout="oops",
+ stderr="some error",
+ ),
+ autospec=True,
+ )
+ runtime = Runtime()
+ with pytest.raises(
+ RuntimeError,
+ match="Unable to find a working copy of ansible executable.",
+ ):
+ _ = runtime.version # pylint: disable=pointless-statement
+
+
+def test_runtime_prepare_ansible_paths_validation() -> None:
+ """Check that we validate collection_path."""
+ runtime = Runtime()
+ runtime.config.collections_paths = "invalid-value" # type: ignore[assignment]
+ with pytest.raises(RuntimeError, match="Unexpected ansible configuration"):
+ runtime._prepare_ansible_paths()
+
+
+@pytest.mark.parametrize(
+ ("folder", "role_name", "isolated"),
+ (
+ ("ansible-role-sample", "acme.sample", True),
+ ("acme.sample2", "acme.sample2", True),
+ ("sample3", "acme.sample3", True),
+ ("sample4", "acme.sample4", False),
+ ),
+ ids=("1", "2", "3", "4"),
+)
+def test_runtime_install_role(
+ caplog: pytest.LogCaptureFixture,
+ folder: str,
+ role_name: str,
+ isolated: bool,
+) -> None:
+ """Checks that we can install roles."""
+ caplog.set_level(logging.INFO)
+ project_dir = Path(__file__).parent / "roles" / folder
+ runtime = Runtime(isolated=isolated, project_dir=project_dir)
+ runtime.prepare_environment(install_local=True)
+ # check that role appears as installed now
+ result = runtime.run(["ansible-galaxy", "list"])
+ assert result.returncode == 0, result
+ assert role_name in result.stdout
+ if isolated:
+ assert pathlib.Path(f"{runtime.cache_dir}/roles/{role_name}").is_symlink()
+ else:
+ assert pathlib.Path(
+ f"{Path(runtime.config.default_roles_path[0]).expanduser()}/{role_name}",
+ ).is_symlink()
+ runtime.clean()
+ # also test that clean does not break when cache_dir is missing
+ tmp_dir = runtime.cache_dir
+ runtime.cache_dir = None
+ runtime.clean()
+ runtime.cache_dir = tmp_dir
+
+
+def test_prepare_environment_with_collections(tmp_path: pathlib.Path) -> None:
+ """Check that collections are correctly installed."""
+ runtime = Runtime(isolated=True, project_dir=tmp_path)
+ runtime.prepare_environment(required_collections={"community.molecule": "0.1.0"})
+
+
+def test_runtime_install_requirements_missing_file() -> None:
+ """Check that missing requirements file is ignored."""
+ # Do not rely on this behavior, it may be removed in the future
+ runtime = Runtime()
+ runtime.install_requirements(Path("/that/does/not/exist"))
+
+
+@pytest.mark.parametrize(
+ ("file", "exc", "msg"),
+ (
+ (
+ Path("/dev/null"),
+ InvalidPrerequisiteError,
+ "file is not a valid Ansible requirements file",
+ ),
+ (
+ Path(__file__).parent / "assets" / "requirements-invalid-collection.yml",
+ AnsibleCommandError,
+ "Got 1 exit code while running: ansible-galaxy",
+ ),
+ (
+ Path(__file__).parent / "assets" / "requirements-invalid-role.yml",
+ AnsibleCommandError,
+ "Got 1 exit code while running: ansible-galaxy",
+ ),
+ ),
+ ids=("empty", "invalid-collection", "invalid-role"),
+)
+def test_runtime_install_requirements_invalid_file(
+ file: Path,
+ exc: type[Any],
+ msg: str,
+) -> None:
+ """Check that invalid requirements file is raising."""
+ runtime = Runtime()
+ with pytest.raises(
+ exc,
+ match=msg,
+ ):
+ runtime.install_requirements(file)
+
+
+@contextmanager
+def cwd(path: Path) -> Iterator[None]:
+ """Context manager for temporary changing current working directory."""
+ old_pwd = Path.cwd()
+ os.chdir(path)
+ try:
+ yield
+ finally:
+ os.chdir(old_pwd)
+
+
+def test_prerun_reqs_v1(caplog: pytest.LogCaptureFixture) -> None:
+ """Checks that the linter can auto-install requirements v1 when found."""
+ runtime = Runtime(verbosity=1)
+ path = Path(__file__).parent.parent / "examples" / "reqs_v1"
+ with cwd(path):
+ runtime.prepare_environment()
+ assert any(
+ msg.startswith("Running ansible-galaxy role install") for msg in caplog.messages
+ )
+ assert all(
+ "Running ansible-galaxy collection install" not in msg
+ for msg in caplog.messages
+ )
+
+
+def test_prerun_reqs_v2(caplog: pytest.LogCaptureFixture) -> None:
+ """Checks that the linter can auto-install requirements v2 when found."""
+ runtime = Runtime(verbosity=1)
+ path = (Path(__file__).parent.parent / "examples" / "reqs_v2").resolve()
+ with cwd(path):
+ runtime.prepare_environment()
+ assert any(
+ msg.startswith("Running ansible-galaxy role install")
+ for msg in caplog.messages
+ )
+ assert any(
+ msg.startswith("Running ansible-galaxy collection install")
+ for msg in caplog.messages
+ )
+
+
+def test_prerun_reqs_broken(runtime: Runtime) -> None:
+ """Checks that the we report invalid requirements.yml file."""
+ path = (Path(__file__).parent.parent / "examples" / "reqs_broken").resolve()
+ with cwd(path), pytest.raises(InvalidPrerequisiteError):
+ runtime.prepare_environment()
+
+
+def test__update_env_no_old_value_no_default_no_value(monkeypatch: MonkeyPatch) -> None:
+ """Make sure empty value does not touch environment."""
+ monkeypatch.delenv("DUMMY_VAR", raising=False)
+
+ runtime = Runtime()
+ runtime._update_env("DUMMY_VAR", [])
+
+ assert "DUMMY_VAR" not in runtime.environ
+
+
+def test__update_env_no_old_value_no_value(monkeypatch: MonkeyPatch) -> None:
+ """Make sure empty value does not touch environment."""
+ monkeypatch.delenv("DUMMY_VAR", raising=False)
+
+ runtime = Runtime()
+ runtime._update_env("DUMMY_VAR", [], "a:b")
+
+ assert "DUMMY_VAR" not in runtime.environ
+
+
+def test__update_env_no_default_no_value(monkeypatch: MonkeyPatch) -> None:
+ """Make sure empty value does not touch environment."""
+ monkeypatch.setenv("DUMMY_VAR", "a:b")
+
+ runtime = Runtime()
+ runtime._update_env("DUMMY_VAR", [])
+
+ assert runtime.environ["DUMMY_VAR"] == "a:b"
+
+
+@pytest.mark.parametrize(
+ ("value", "result"),
+ (
+ (["a"], "a"),
+ (["a", "b"], "a:b"),
+ (["a", "b", "c"], "a:b:c"),
+ ),
+)
+def test__update_env_no_old_value_no_default(
+ monkeypatch: MonkeyPatch,
+ value: list[str],
+ result: str,
+) -> None:
+ """Values are concatenated using : as the separator."""
+ monkeypatch.delenv("DUMMY_VAR", raising=False)
+
+ runtime = Runtime()
+ runtime._update_env("DUMMY_VAR", value)
+
+ assert runtime.environ["DUMMY_VAR"] == result
+
+
+@pytest.mark.parametrize(
+ ("default", "value", "result"),
+ (
+ ("a:b", ["c"], "c:a:b"),
+ ("a:b", ["c:d"], "c:d:a:b"),
+ ),
+)
+def test__update_env_no_old_value(
+ monkeypatch: MonkeyPatch,
+ default: str,
+ value: list[str],
+ result: str,
+) -> None:
+ """Values are appended to default value."""
+ monkeypatch.delenv("DUMMY_VAR", raising=False)
+
+ runtime = Runtime()
+ runtime._update_env("DUMMY_VAR", value, default)
+
+ assert runtime.environ["DUMMY_VAR"] == result
+
+
+@pytest.mark.parametrize(
+ ("old_value", "value", "result"),
+ (
+ ("a:b", ["c"], "c:a:b"),
+ ("a:b", ["c:d"], "c:d:a:b"),
+ ),
+)
+def test__update_env_no_default(
+ monkeypatch: MonkeyPatch,
+ old_value: str,
+ value: list[str],
+ result: str,
+) -> None:
+ """Values are appended to preexisting value."""
+ monkeypatch.setenv("DUMMY_VAR", old_value)
+
+ runtime = Runtime()
+ runtime._update_env("DUMMY_VAR", value)
+
+ assert runtime.environ["DUMMY_VAR"] == result
+
+
+@pytest.mark.parametrize(
+ ("old_value", "default", "value", "result"),
+ (
+ ("", "", ["e"], "e"),
+ ("a", "", ["e"], "e:a"),
+ ("", "c", ["e"], "e"),
+ ("a", "c", ["e:f"], "e:f:a"),
+ ),
+)
+def test__update_env(
+ monkeypatch: MonkeyPatch,
+ old_value: str,
+ default: str, # pylint: disable=unused-argument # noqa: ARG001
+ value: list[str],
+ result: str,
+) -> None:
+ """Defaults are ignored when preexisting value is present."""
+ monkeypatch.setenv("DUMMY_VAR", old_value)
+
+ runtime = Runtime()
+ runtime._update_env("DUMMY_VAR", value)
+
+ assert runtime.environ["DUMMY_VAR"] == result
+
+
+def test_require_collection_wrong_version(runtime: Runtime) -> None:
+ """Tests behaviour of require_collection."""
+ subprocess.check_output(
+ [ # noqa: S603
+ "ansible-galaxy",
+ "collection",
+ "install",
+ "examples/reqs_v2/community-molecule-0.1.0.tar.gz",
+ "-p",
+ "~/.ansible/collections",
+ ],
+ )
+ with pytest.raises(InvalidPrerequisiteError) as pytest_wrapped_e:
+ runtime.require_collection("community.molecule", "9999.9.9")
+ assert pytest_wrapped_e.type == InvalidPrerequisiteError
+ assert pytest_wrapped_e.value.code == INVALID_PREREQUISITES_RC
+
+
+def test_require_collection_invalid_name(runtime: Runtime) -> None:
+ """Check that require_collection raise with invalid collection name."""
+ with pytest.raises(
+ InvalidPrerequisiteError,
+ match="Invalid collection name supplied:",
+ ):
+ runtime.require_collection("that-is-invalid")
+
+
+def test_require_collection_invalid_collections_path(runtime: Runtime) -> None:
+ """Check that require_collection raise with invalid collections path."""
+ runtime.config.collections_paths = "/that/is/invalid" # type: ignore[assignment]
+ with pytest.raises(
+ InvalidPrerequisiteError,
+ match="Unable to determine ansible collection paths",
+ ):
+ runtime.require_collection("community.molecule")
+
+
+def test_require_collection_preexisting_broken(tmp_path: pathlib.Path) -> None:
+ """Check that require_collection raise with broken pre-existing collection."""
+ runtime = Runtime(isolated=True, project_dir=tmp_path)
+ dest_path: str = runtime.config.collections_paths[0]
+ dest = pathlib.Path(dest_path) / "ansible_collections" / "foo" / "bar"
+ dest.mkdir(parents=True, exist_ok=True)
+ with pytest.raises(InvalidPrerequisiteError, match="missing MANIFEST.json"):
+ runtime.require_collection("foo.bar")
+
+
+def test_require_collection(runtime_tmp: Runtime) -> None:
+ """Check that require collection successful install case."""
+ runtime_tmp.require_collection("community.molecule", "0.1.0")
+
+
+@pytest.mark.parametrize(
+ ("name", "version", "install"),
+ (
+ ("fake_namespace.fake_name", None, True),
+ ("fake_namespace.fake_name", "9999.9.9", True),
+ ("fake_namespace.fake_name", None, False),
+ ),
+ ids=("a", "b", "c"),
+)
+def test_require_collection_missing(
+ name: str,
+ version: str,
+ install: bool,
+ runtime: Runtime,
+) -> None:
+ """Tests behaviour of require_collection, missing case."""
+ with pytest.raises(AnsibleCompatError) as pytest_wrapped_e:
+ runtime.require_collection(name=name, version=version, install=install)
+ assert pytest_wrapped_e.type == InvalidPrerequisiteError
+ assert pytest_wrapped_e.value.code == INVALID_PREREQUISITES_RC
+
+
+def test_install_collection(runtime: Runtime) -> None:
+ """Check that valid collection installs do not fail."""
+ runtime.install_collection("examples/reqs_v2/community-molecule-0.1.0.tar.gz")
+
+
+def test_install_collection_git(runtime: Runtime) -> None:
+ """Check that valid collection installs do not fail."""
+ runtime.install_collection(
+ "git+https://github.com/ansible-collections/ansible.posix,main",
+ )
+
+
+def test_install_collection_dest(runtime: Runtime, tmp_path: pathlib.Path) -> None:
+ """Check that valid collection to custom destination passes."""
+ # Since Ansible 2.15.3 there is no guarantee that this will install the collection at requested path
+ # as it might decide to not install anything if requirement is already present at another location.
+ runtime.install_collection(
+ "examples/reqs_v2/community-molecule-0.1.0.tar.gz",
+ destination=tmp_path,
+ )
+ runtime.load_collections()
+ for collection in runtime.collections:
+ if collection == "community.molecule":
+ return
+ msg = "Failed to find collection as installed."
+ raise AssertionError(msg)
+
+
+def test_install_collection_fail(runtime: Runtime) -> None:
+ """Check that invalid collection install fails."""
+ with pytest.raises(AnsibleCompatError) as pytest_wrapped_e:
+ runtime.install_collection("community.molecule:>=9999.0")
+ assert pytest_wrapped_e.type == InvalidPrerequisiteError
+ assert pytest_wrapped_e.value.code == INVALID_PREREQUISITES_RC
+
+
+def test_install_galaxy_role(runtime_tmp: Runtime) -> None:
+ """Check install role with empty galaxy file."""
+ pathlib.Path(f"{runtime_tmp.project_dir}/galaxy.yml").touch()
+ pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir()
+ pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").touch()
+ # this should only raise a warning
+ runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=1)
+ # this should test the bypass role name check path
+ runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=2)
+ # this should raise an error
+ with pytest.raises(
+ InvalidPrerequisiteError,
+ match="does not follow current galaxy requirements",
+ ):
+ runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=0)
+
+
+def test_install_galaxy_role_unlink(
+ caplog: pytest.LogCaptureFixture,
+) -> None:
+ """Test ability to unlink incorrect symlinked roles."""
+ runtime_tmp = Runtime(verbosity=1)
+ runtime_tmp.prepare_environment()
+ pathlib.Path(f"{runtime_tmp.cache_dir}/roles").mkdir(parents=True, exist_ok=True)
+ pathlib.Path(f"{runtime_tmp.cache_dir}/roles/acme.get_rich").symlink_to("/dev/null")
+ pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir()
+ pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text(
+ """galaxy_info:
+ role_name: get_rich
+ namespace: acme
+""",
+ encoding="utf-8",
+ )
+ runtime_tmp._install_galaxy_role(runtime_tmp.project_dir)
+ assert "symlink to current repository" in caplog.text
+
+
+def test_install_galaxy_role_bad_namespace(runtime_tmp: Runtime) -> None:
+ """Check install role with bad namespace in galaxy info."""
+ pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir()
+ pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text(
+ """galaxy_info:
+ role_name: foo
+ author: bar
+ namespace: ["xxx"]
+""",
+ )
+ # this should raise an error regardless the role_name_check value
+ with pytest.raises(AnsibleCompatError, match="Role namespace must be string, not"):
+ runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=1)
+
+
+@pytest.mark.parametrize(
+ "galaxy_info",
+ (
+ """galaxy_info:
+ role_name: foo-bar
+ namespace: acme
+""",
+ """galaxy_info:
+ role_name: foo-bar
+""",
+ ),
+ ids=("bad-name", "bad-name-without-namespace"),
+)
+def test_install_galaxy_role_name_role_name_check_equals_to_1(
+ runtime_tmp: Runtime,
+ galaxy_info: str,
+ caplog: pytest.LogCaptureFixture,
+) -> None:
+ """Check install role with bad role name in galaxy info."""
+ caplog.set_level(logging.WARN)
+ pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir()
+ pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text(
+ galaxy_info,
+ encoding="utf-8",
+ )
+
+ runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=1)
+ assert "Computed fully qualified role name of " in caplog.text
+
+
+def test_install_galaxy_role_no_checks(runtime_tmp: Runtime) -> None:
+ """Check install role with bad namespace in galaxy info."""
+ runtime_tmp.prepare_environment()
+ pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir()
+ pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text(
+ """galaxy_info:
+ role_name: foo
+ author: bar
+ namespace: acme
+""",
+ )
+ runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=2)
+ result = runtime_tmp.run(["ansible-galaxy", "list"])
+ assert "- acme.foo," in result.stdout
+ assert result.returncode == 0, result
+
+
+def test_upgrade_collection(runtime_tmp: Runtime) -> None:
+ """Check that collection upgrade is possible."""
+ # ensure that we inject our tmp folders in ansible paths
+ runtime_tmp.prepare_environment()
+
+ # we install specific oudated version of a collection
+ runtime_tmp.install_collection("examples/reqs_v2/community-molecule-0.1.0.tar.gz")
+ with pytest.raises(
+ InvalidPrerequisiteError,
+ match="Found community.molecule collection 0.1.0 but 9.9.9 or newer is required.",
+ ):
+ # we check that when install=False, we raise error
+ runtime_tmp.require_collection("community.molecule", "9.9.9", install=False)
+ # this should not fail, as we have this version
+ runtime_tmp.require_collection("community.molecule", "0.1.0")
+
+
+def test_require_collection_no_cache_dir() -> None:
+ """Check require_collection without a cache directory."""
+ runtime = Runtime()
+ assert not runtime.cache_dir
+ runtime.require_collection("community.molecule", "0.1.0", install=True)
+
+
+def test_runtime_env_ansible_library(monkeypatch: MonkeyPatch) -> None:
+ """Verify that custom path specified using ANSIBLE_LIBRARY is not lost."""
+ path_name = "foo"
+ monkeypatch.setenv("ANSIBLE_LIBRARY", path_name)
+
+ path_name = os.path.realpath(path_name)
+ runtime = Runtime()
+ runtime.prepare_environment()
+ assert path_name in runtime.config.default_module_path
+
+
+@pytest.mark.parametrize(
+ ("lower", "upper", "expected"),
+ (
+ ("1.0", "9999.0", True),
+ (None, "9999.0", True),
+ ("1.0", None, True),
+ ("9999.0", None, False),
+ (None, "1.0", False),
+ ),
+ ids=("1", "2", "3", "4", "5"),
+)
+def test_runtime_version_in_range(
+ lower: str | None,
+ upper: str | None,
+ expected: bool,
+) -> None:
+ """Validate functioning of version_in_range."""
+ runtime = Runtime()
+ assert runtime.version_in_range(lower=lower, upper=upper) is expected
+
+
+@pytest.mark.parametrize(
+ ("path", "scenario", "expected_collections"),
+ (
+ pytest.param(
+ "test/collections/acme.goodies",
+ "default",
+ [
+ "ansible.posix", # from tests/requirements.yml
+ "ansible.utils", # from galaxy.yml
+ "community.molecule", # from galaxy.yml
+ "community.crypto", # from galaxy.yml as a git dependency
+ ],
+ id="normal",
+ ),
+ pytest.param(
+ "test/collections/acme.goodies/roles/baz",
+ "deep_scenario",
+ ["community.molecule"],
+ id="deep",
+ ),
+ ),
+)
+def test_install_collection_from_disk(
+ path: str,
+ scenario: str,
+ expected_collections: list[str],
+) -> None:
+ """Tests ability to install a local collection."""
+ # ensure we do not have acme.goodies installed in user directory as it may
+ # produce false positives
+ rmtree(
+ pathlib.Path(
+ "~/.ansible/collections/ansible_collections/acme/goodies",
+ ).expanduser(),
+ ignore_errors=True,
+ )
+ with cwd(Path(path)):
+ runtime = Runtime(isolated=True)
+ # this should call install_collection_from_disk(".")
+ runtime.prepare_environment(install_local=True)
+ # that molecule converge playbook can be used without molecule and
+ # should validate that the installed collection is available.
+ result = runtime.run(["ansible-playbook", f"molecule/{scenario}/converge.yml"])
+ assert result.returncode == 0, result.stdout
+ runtime.load_collections()
+ for collection_name in expected_collections:
+ assert (
+ collection_name in runtime.collections
+ ), f"{collection_name} not found in {runtime.collections.keys()}"
+ runtime.clean()
+
+
+def test_install_collection_from_disk_fail() -> None:
+ """Tests that we fail to install a broken collection."""
+ with cwd(Path("test/collections/acme.broken")):
+ runtime = Runtime(isolated=True)
+ with pytest.raises(RuntimeError) as exc_info:
+ runtime.prepare_environment(install_local=True)
+ # based on version of Ansible used, we might get a different error,
+ # but both errors should be considered acceptable
+ assert exc_info.type in (
+ RuntimeError,
+ AnsibleCompatError,
+ AnsibleCommandError,
+ InvalidPrerequisiteError,
+ )
+ assert exc_info.match(
+ "(is missing the following mandatory|Got 1 exit code while running: ansible-galaxy collection build)",
+ )
+
+
+def test_prepare_environment_offline_role() -> None:
+ """Ensure that we can make use of offline roles."""
+ with cwd(Path("test/roles/acme.missing_deps")):
+ runtime = Runtime(isolated=True)
+ runtime.prepare_environment(install_local=True, offline=True)
+
+
+def test_runtime_run(runtime: Runtime) -> None:
+ """Check if tee and non tee mode return same kind of results."""
+ result1 = runtime.run(["seq", "10"])
+ result2 = runtime.run(["seq", "10"], tee=True)
+ assert result1.returncode == result2.returncode
+ assert result1.stderr == result2.stderr
+ assert result1.stdout == result2.stdout
+
+
+def test_runtime_exec_cwd(runtime: Runtime) -> None:
+ """Check if passing cwd works as expected."""
+ path = Path("/")
+ result1 = runtime.run(["pwd"], cwd=path)
+ result2 = runtime.run(["pwd"])
+ assert result1.stdout.rstrip() == str(path)
+ assert result1.stdout != result2.stdout
+
+
+def test_runtime_exec_env(runtime: Runtime) -> None:
+ """Check if passing env works."""
+ result = runtime.run(["printenv", "FOO"])
+ assert not result.stdout
+
+ result = runtime.run(["printenv", "FOO"], env={"FOO": "bar"})
+ assert result.stdout.rstrip() == "bar"
+
+ runtime.environ["FOO"] = "bar"
+ result = runtime.run(["printenv", "FOO"])
+ assert result.stdout.rstrip() == "bar"
+
+
+def test_runtime_plugins(runtime: Runtime) -> None:
+ """Tests ability to access detected plugins."""
+ assert len(runtime.plugins.cliconf) == 0
+ # ansible.netcommon.restconf might be in httpapi
+ assert isinstance(runtime.plugins.httpapi, dict)
+ # "ansible.netcommon.default" might be in runtime.plugins.netconf
+ assert isinstance(runtime.plugins.netconf, dict)
+ assert isinstance(runtime.plugins.role, dict)
+ assert "become" in runtime.plugins.keyword
+
+ if ansible_version() < Version("2.14.0"):
+ assert "sudo" in runtime.plugins.become
+ assert "memory" in runtime.plugins.cache
+ assert "default" in runtime.plugins.callback
+ assert "local" in runtime.plugins.connection
+ assert "ini" in runtime.plugins.inventory
+ assert "env" in runtime.plugins.lookup
+ assert "sh" in runtime.plugins.shell
+ assert "host_group_vars" in runtime.plugins.vars
+ assert "file" in runtime.plugins.module
+ assert "free" in runtime.plugins.strategy
+ # ansible-doc below 2.14 does not support listing 'test' and 'filter' types:
+ with pytest.raises(RuntimeError):
+ assert "is_abs" in runtime.plugins.test
+ with pytest.raises(RuntimeError):
+ assert "bool" in runtime.plugins.filter
+ else:
+ assert "ansible.builtin.sudo" in runtime.plugins.become
+ assert "ansible.builtin.memory" in runtime.plugins.cache
+ assert "ansible.builtin.default" in runtime.plugins.callback
+ assert "ansible.builtin.local" in runtime.plugins.connection
+ assert "ansible.builtin.ini" in runtime.plugins.inventory
+ assert "ansible.builtin.env" in runtime.plugins.lookup
+ assert "ansible.builtin.sh" in runtime.plugins.shell
+ assert "ansible.builtin.host_group_vars" in runtime.plugins.vars
+ assert "ansible.builtin.file" in runtime.plugins.module
+ assert "ansible.builtin.free" in runtime.plugins.strategy
+ assert "ansible.builtin.is_abs" in runtime.plugins.test
+ assert "ansible.builtin.bool" in runtime.plugins.filter
+
+
+@pytest.mark.parametrize(
+ ("path", "result"),
+ (
+ pytest.param(
+ "test/assets/galaxy_paths",
+ ["test/assets/galaxy_paths/foo/galaxy.yml"],
+ id="1",
+ ),
+ pytest.param(
+ "test/collections",
+ [], # should find nothing because these folders are not valid namespaces
+ id="2",
+ ),
+ pytest.param(
+ "test/assets/galaxy_paths/foo",
+ ["test/assets/galaxy_paths/foo/galaxy.yml"],
+ id="3",
+ ),
+ ),
+)
+def test_galaxy_path(path: str, result: list[str]) -> None:
+ """Check behavior of galaxy path search."""
+ assert search_galaxy_paths(Path(path)) == result
+
+
+@pytest.mark.parametrize(
+ ("name", "result"),
+ (
+ pytest.param(
+ "foo",
+ False,
+ id="0",
+ ),
+ pytest.param(
+ "git+git",
+ True,
+ id="1",
+ ),
+ pytest.param(
+ "git@acme.com",
+ True,
+ id="2",
+ ),
+ ),
+)
+def test_is_url(name: str, result: bool) -> None:
+ """Checks functionality of is_url."""
+ assert is_url(name) == result
+
+
+def test_prepare_environment_repair_broken_symlink(
+ caplog: pytest.LogCaptureFixture,
+) -> None:
+ """Ensure we can deal with broken symlinks in collections."""
+ caplog.set_level(logging.INFO)
+ project_dir = Path(__file__).parent / "collections" / "acme.minimal"
+ runtime = Runtime(isolated=True, project_dir=project_dir)
+ assert runtime.cache_dir
+ acme = runtime.cache_dir / "collections" / "ansible_collections" / "acme"
+ acme.mkdir(parents=True, exist_ok=True)
+ goodies = acme / "minimal"
+ rmtree(goodies, ignore_errors=True)
+ goodies.unlink(missing_ok=True)
+ goodies.symlink_to("/invalid/destination")
+ runtime.prepare_environment(install_local=True)
+ assert any(
+ msg.startswith("Collection is symlinked, but not pointing to")
+ for msg in caplog.messages
+ )
+
+
+def test_get_galaxy_role_name_invalid() -> None:
+ """Verifies that function returns empty string on invalid input."""
+ galaxy_infos = {
+ "role_name": False, # <-- invalid data, should be string
+ }
+ assert _get_galaxy_role_name(galaxy_infos) == ""
diff --git a/test/test_runtime_example.py b/test/test_runtime_example.py
new file mode 100644
index 0000000..e500e59
--- /dev/null
+++ b/test/test_runtime_example.py
@@ -0,0 +1,24 @@
+"""Sample use of Runtime class."""
+from ansible_compat.runtime import Runtime
+
+
+def test_runtime_example() -> None:
+ """Test basic functionality of Runtime class."""
+ # instantiate the runtime using isolated mode, so installing new
+ # roles/collections do not pollute the default setup.
+ runtime = Runtime(isolated=True, max_retries=3)
+
+ # Print Ansible core version
+ _ = runtime.version # 2.9.10 (Version object)
+ # Get configuration info from runtime
+ _ = runtime.config.collections_path
+
+ # Detect if current project is a collection and install its requirements
+ runtime.prepare_environment(install_local=True) # will retry 3 times if needed
+
+ # Install a new collection (will retry 3 times if needed)
+ runtime.install_collection("examples/reqs_v2/community-molecule-0.1.0.tar.gz")
+
+ # Execute a command
+ result = runtime.run(["ansible-doc", "--list"])
+ assert result.returncode == 0
diff --git a/test/test_runtime_scan_path.py b/test/test_runtime_scan_path.py
new file mode 100644
index 0000000..be44f1c
--- /dev/null
+++ b/test/test_runtime_scan_path.py
@@ -0,0 +1,102 @@
+"""Test the scan path functionality of the runtime."""
+
+import json
+import textwrap
+from dataclasses import dataclass, fields
+from pathlib import Path
+
+import pytest
+from _pytest.monkeypatch import MonkeyPatch
+
+from ansible_compat.runtime import Runtime
+
+from .conftest import VirtualEnvironment
+
+V2_COLLECTION_TARBALL = Path("examples/reqs_v2/community-molecule-0.1.0.tar.gz")
+V2_COLLECTION_NAMESPACE = "community"
+V2_COLLECTION_NAME = "molecule"
+V2_COLLECTION_VERSION = "0.1.0"
+V2_COLLECTION_FULL_NAME = f"{V2_COLLECTION_NAMESPACE}.{V2_COLLECTION_NAME}"
+
+
+@dataclass
+class ScanSysPath:
+ """Parameters for scan tests."""
+
+ scan: bool
+ raises_not_found: bool
+
+ def __str__(self) -> str:
+ """Return a string representation of the object."""
+ parts = [
+ f"{field.name}{str(getattr(self, field.name))[0]}" for field in fields(self)
+ ]
+ return "-".join(parts)
+
+
+@pytest.mark.parametrize(
+ ("param"),
+ (
+ ScanSysPath(scan=False, raises_not_found=True),
+ ScanSysPath(scan=True, raises_not_found=False),
+ ),
+ ids=str,
+)
+def test_scan_sys_path(
+ venv_module: VirtualEnvironment,
+ monkeypatch: MonkeyPatch,
+ runtime_tmp: Runtime,
+ tmp_path: Path,
+ param: ScanSysPath,
+) -> None:
+ """Confirm sys path is scanned for collections.
+
+ :param venv_module: Fixture for a virtual environment
+ :param monkeypatch: Fixture for monkeypatching
+ :param runtime_tmp: Fixture for a Runtime object
+ :param tmp_dir: Fixture for a temporary directory
+ :param param: The parameters for the test
+ """
+ first_site_package_dir = venv_module.site_package_dirs()[0]
+
+ installed_to = (
+ first_site_package_dir
+ / "ansible_collections"
+ / V2_COLLECTION_NAMESPACE
+ / V2_COLLECTION_NAME
+ )
+ if not installed_to.exists():
+ # Install the collection into the venv site packages directory, force
+ # as of yet this test is not isolated from the rest of the system
+ runtime_tmp.install_collection(
+ collection=V2_COLLECTION_TARBALL,
+ destination=first_site_package_dir,
+ force=True,
+ )
+ # Confirm the collection is installed
+ assert installed_to.exists()
+ # Set the sys scan path environment variable
+ monkeypatch.setenv("ANSIBLE_COLLECTIONS_SCAN_SYS_PATH", str(param.scan))
+ # Set the ansible collections paths to avoid bleed from other tests
+ monkeypatch.setenv("ANSIBLE_COLLECTIONS_PATH", str(tmp_path))
+
+ script = textwrap.dedent(
+ f"""
+ import json;
+ from ansible_compat.runtime import Runtime;
+ r = Runtime();
+ fv, cp = r.require_collection(name="{V2_COLLECTION_FULL_NAME}", version="{V2_COLLECTION_VERSION}", install=False);
+ print(json.dumps({{"found_version": str(fv), "collection_path": str(cp)}}));
+ """,
+ )
+
+ proc = venv_module.python_script_run(script)
+ if param.raises_not_found:
+ assert proc.returncode != 0, (proc.stdout, proc.stderr)
+ assert "InvalidPrerequisiteError" in proc.stderr
+ assert "'community.molecule' not found" in proc.stderr
+ else:
+ assert proc.returncode == 0, (proc.stdout, proc.stderr)
+ result = json.loads(proc.stdout)
+ assert result["found_version"] == V2_COLLECTION_VERSION
+ assert result["collection_path"] == str(installed_to)
diff --git a/test/test_schema.py b/test/test_schema.py
new file mode 100644
index 0000000..b253cb5
--- /dev/null
+++ b/test/test_schema.py
@@ -0,0 +1,73 @@
+"""Tests for schema utilities."""
+from __future__ import annotations
+
+import json
+from pathlib import Path
+from typing import TYPE_CHECKING, Any
+
+import pytest
+
+from ansible_compat.schema import JsonSchemaError, json_path, validate
+
+if TYPE_CHECKING:
+ from ansible_compat.types import JSON
+
+expected_results = [
+ JsonSchemaError(
+ message="False is not of type 'string'",
+ data_path="environment.a",
+ json_path="$.environment.a",
+ schema_path="properties.environment.additionalProperties.type",
+ relative_schema='{"type": "string"}',
+ expected="string",
+ validator="type",
+ found="False",
+ ),
+ JsonSchemaError(
+ message="True is not of type 'string'",
+ data_path="environment.b",
+ json_path="$.environment.b",
+ schema_path="properties.environment.additionalProperties.type",
+ relative_schema='{"type": "string"}',
+ expected="string",
+ validator="type",
+ found="True",
+ ),
+]
+
+
+def json_from_asset(file_name: str) -> JSON:
+ """Load a json file from disk."""
+ file = Path(__file__).parent / file_name
+ with file.open(encoding="utf-8") as f:
+ return json.load(f) # type: ignore[no-any-return]
+
+
+def jsonify(data: Any) -> JSON: # noqa: ANN401
+ """Convert object in JSON data structure."""
+ return json.loads(json.dumps(data, default=vars, sort_keys=True)) # type: ignore[no-any-return]
+
+
+@pytest.mark.parametrize("index", range(1))
+def test_schema(index: int) -> None:
+ """Test the schema validator."""
+ schema = json_from_asset(f"assets/validate{index}_schema.json")
+ data = json_from_asset(f"assets/validate{index}_data.json")
+ expected = json_from_asset(f"assets/validate{index}_expected.json")
+
+ # ensure we produce consistent results between runs
+ for _ in range(1, 100):
+ found_errors = validate(schema=schema, data=data)
+ # ensure returned results are already sorted, as we assume our class
+ # knows how to sort itself
+ assert sorted(found_errors) == found_errors, "multiple errors not sorted"
+
+ found_errors_json = jsonify(found_errors)
+ assert (
+ found_errors_json == expected
+ ), f"inconsistent returns: {found_errors_json}"
+
+
+def test_json_path() -> None:
+ """Test json_path function."""
+ assert json_path(["a", 1, "b"]) == "$.a[1].b"
diff --git a/tools/get-version.sh b/tools/get-version.sh
new file mode 100755
index 0000000..67033f8
--- /dev/null
+++ b/tools/get-version.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+set -e
+{
+ python3 -c "import setuptools_scm" || python3 -m pip install --user setuptools-scm
+} 1>&2 # redirect stdout to stderr to avoid polluting the output
+python3 -m setuptools_scm | \
+ sed 's/Guessed Version\([^+]\+\).*/\1/'
diff --git a/tools/update-version.sh b/tools/update-version.sh
new file mode 100755
index 0000000..a227023
--- /dev/null
+++ b/tools/update-version.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+DIR=$(dirname "$0")
+VERSION=$(./tools/get-version.sh)
+mkdir -p "${DIR}/../dist"
+sed -e "s/VERSION_PLACEHOLDER/${VERSION}/" \
+ "${DIR}/../dist/python-ansible-compat.spec.in" \
+ > "${DIR}/../dist/python-ansible-compat.spec"
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..88ce66a
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,191 @@
+[tox]
+minversion = 4.0.0
+envlist =
+ lint
+ pkg
+ docs
+ py
+ py-devel
+ py39-ansible212
+ py39-ansible213
+ py39-ansible214
+ py39-ansible215
+ py310-ansible212
+ py310-ansible213
+ py310-ansible214
+ py310-ansible215
+ py311-ansible212
+ py311-ansible213
+ py311-ansible214
+ py311-ansible215
+ py312-ansible216
+
+isolated_build = true
+skip_missing_interpreters = True
+requires =
+ tox >= 4.6.3
+ setuptools >= 65.3.0 # editable installs
+
+[testenv]
+description =
+ Run the tests
+ devel: ansible devel branch
+ ansible212: ansible-core 2.12
+ ansible213: ansible-core 2.13
+ ansible214: ansible-core 2.14
+ ansible215: ansible-core 2.15
+ ansible216: ansible-core 2.16
+
+deps =
+ ansible212: ansible-core>=2.12,<2.13
+ ansible213: ansible-core>=2.13,<2.14
+ ansible214: ansible-core>=2.14,<2.15
+ ansible215: ansible-core>=2.15,<2.16
+ ansible216: ansible-core>=2.16,<2.17
+
+ devel: ansible-core @ git+https://github.com/ansible/ansible.git@c5d18c39d81e2b3b10856b2fb76747230e4fac4a # GPLv3+
+ # avoid installing ansible-core on -devel envs:
+ !devel: ansible-core
+extras =
+ test
+
+commands =
+ sh -c "ansible --version | head -n 1"
+ # We add coverage options but not making them mandatory as we do not want to force
+ # pytest users to run coverage when they just want to run a single test with `pytest -k test`
+ coverage run -m pytest {posargs:}
+ sh -c "coverage combine -a -q --data-file=.coverage {toxworkdir}/.coverage.*"
+ # needed for upload to codecov.io
+ -sh -c "COVERAGE_FILE= coverage xml --ignore-errors -q --fail-under=0"
+ # needed for vscode integration due to https://github.com/ryanluker/vscode-coverage-gutters/issues/403
+ -sh -c "COVERAGE_FILE= coverage lcov --ignore-errors -q --fail-under=0"
+ sh -c "COVERAGE_FILE= coverage report"
+ # We fail if files are modified at the end
+ git diff --exit-code
+
+commands_pre =
+ # safety measure to assure we do not accidentally run tests with broken dependencies
+ {envpython} -m pip check
+ # cleaning needed to prevent errors between runs
+ sh -c "rm -f .coverage {toxworkdir}/.coverage.* 2>/dev/null || true"
+passenv =
+ CURL_CA_BUNDLE # https proxies, https://github.com/tox-dev/tox/issues/1437
+ FORCE_COLOR
+ HOME
+ NO_COLOR
+ PYTEST_* # allows developer to define their own preferences
+ PY_COLORS
+ REQUESTS_CA_BUNDLE # https proxies
+ SSL_CERT_FILE # https proxies
+ LANG
+ LC_ALL
+ LC_CTYPE
+setenv =
+ ANSIBLE_DEVEL_WARNING='false'
+ COVERAGE_FILE = {env:COVERAGE_FILE:{toxworkdir}/.coverage.{envname}}
+ COVERAGE_PROCESS_START={toxinidir}/pyproject.toml
+ PIP_DISABLE_PIP_VERSION_CHECK = 1
+ PIP_CONSTRAINT = {toxinidir}/requirements.txt
+ PRE_COMMIT_COLOR = always
+ PYTEST_REQPASS = 93
+ FORCE_COLOR = 1
+allowlist_externals =
+ ansible
+ git
+ sh
+# https://tox.wiki/en/latest/upgrading.html#editable-mode
+package = editable
+
+[testenv:lint]
+description = Run all linters
+# locked basepython is needed because to keep constrains.txt predictable
+basepython = python3.10
+deps =
+ pre-commit>=2.6.0
+skip_install = true
+usedevelop = false
+commands =
+ pre-commit run -a --show-diff-on-failure {posargs:}
+ pre-commit run -a pip-compile
+passenv =
+ {[testenv]passenv}
+ PRE_COMMIT_HOME
+setenv =
+ {[testenv]setenv}
+ PIP_CONSTRAINT = /dev/null
+
+[testenv:deps]
+description = Bump all test dependencies
+basepython = {[testenv:lint]basepython}
+envdir = {toxworkdir}/lint
+deps = {[testenv:lint]deps}
+skip_install = true
+commands =
+ pre-commit run -a --hook-stage manual pip-compile-upgrade
+ {[testenv:lint]commands}
+setenv =
+ {[testenv]setenv}
+ PIP_CONSTRAINT = /dev/null
+
+[testenv:pkg]
+description =
+ Build package, verify metadata, install package and assert behavior when ansible is missing.
+deps =
+ build >= 0.9.0
+ twine >= 4.0.1
+skip_install = true
+# Ref: https://twitter.com/di_codes/status/1044358639081975813
+commands =
+ # build wheel and sdist using PEP-517
+ {envpython} -c 'import os.path, shutil, sys; \
+ dist_dir = os.path.join("{toxinidir}", "dist"); \
+ os.path.isdir(dist_dir) or sys.exit(0); \
+ print("Removing \{!s\} contents...".format(dist_dir), file=sys.stderr); \
+ shutil.rmtree(dist_dir)'
+ {envpython} -m build \
+ --outdir {toxinidir}/dist/ \
+ {toxinidir}
+ # Validate metadata using twine
+ twine check --strict {toxinidir}/dist/*
+ # Install the wheel
+ sh -c "python3 -m pip install {toxinidir}/dist/*.whl"
+ pip uninstall -y ansible-compat
+
+[testenv:py]
+description = Run the tests with {basepython} ansible-core 2.12+
+deps =
+ {[testenv]deps}
+ ansible-core>=2.12
+
+[testenv:rpm]
+description = Use packit to build RPM (requires RPM based Linux distro)
+deps =
+ packitos
+commands =
+ packit build in-mock
+
+[testenv:docs]
+description = Build docs
+commands =
+ mkdocs {posargs:build} --strict
+extras = docs
+passenv = *
+
+[testenv:smoke]
+description = Run ansible-lint own testing with current code from compat library
+commands_pre =
+ ansible localhost -m ansible.builtin.git -a 'repo=https://github.com/ansible/ansible-lint dest={envdir}/tmp/ansible-lint'
+ pip install -e "{envdir}/tmp/ansible-lint[test]"
+commands =
+ bash -c "pip freeze|grep ansible"
+ pytest -k role
+deps =
+ ansible-core
+setenv =
+ {[testenv]setenv}
+ PIP_CONSTRAINT = /dev/null
+ PYTEST_REQPASS = 0
+changedir = {envdir}/tmp/ansible-lint
+allowlist_externals =
+ pwd
+ bash