author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-13 12:04:41 +0000
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-13 12:04:41 +0000
commit | 975f66f2eebe9dadba04f275774d4ab83f74cf25 (patch)
tree | 89bd26a93aaae6a25749145b7e4bca4a1e75b2be /ansible_collections/community/hashi_vault
parent | Initial commit. (diff)
download | ansible-975f66f2eebe9dadba04f275774d4ab83f74cf25.tar.xz, ansible-975f66f2eebe9dadba04f275774d4ab83f74cf25.zip
Adding upstream version 7.7.0+dfsg. (tag: upstream/7.7.0+dfsg)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ansible_collections/community/hashi_vault')
404 files changed, 29118 insertions, 0 deletions
diff --git a/ansible_collections/community/hashi_vault/.git-blame-ignore-revs b/ansible_collections/community/hashi_vault/.git-blame-ignore-revs new file mode 100644 index 000000000..55ec14d44 --- /dev/null +++ b/ansible_collections/community/hashi_vault/.git-blame-ignore-revs @@ -0,0 +1,4 @@ +# .git-blame-ignore-revs + +# update license headers +a1ab6601956eece531d46b3348a4e3b7adfa57e2 diff --git a/ansible_collections/community/hashi_vault/.github/actions/ansible-codecov/action.yml b/ansible_collections/community/hashi_vault/.github/actions/ansible-codecov/action.yml new file mode 100644 index 000000000..8acc1a83e --- /dev/null +++ b/ansible_collections/community/hashi_vault/.github/actions/ansible-codecov/action.yml @@ -0,0 +1,78 @@ +--- +name: Upload Ansible coverage reports with flags +description: Does separate codecov uploads with flags taken from ansible-test's --group-by options +inputs: + additional-flags: + description: Additional custom flags (comma separated) to be added to all reports. + required: false + directory-flag-pattern: + description: | + A pattern to infer flags from directory names. For example this pattern: + {ansible-%}=python-{py%}={%} + applied to a directory name like: + stable-2.11=python-3.9=rando + Results in the flags: + ansible-stable-2.11,py3.9,rando + required: false + file-flag-pattern: + description: | + A pattern to infer flags from coverage reports. For example this pattern: + coverage={%}={target_%}={env_%}=python-{py%}.xml + applied to a file name like: + coverage=integration=lookup_some_plugin=docker-default=python-3.9.xml + Results in the flags: + integration,target_lookup_some_plugin,env_docker-default,py3.9 + required: false + default: coverage={%}={target_%}={env_%}.xml + directory: + description: The directory to scan recursively. Defaults to current working directory. + required: false + codecov-uploader-version: + description: | + The version of the codecov uploader to use. 'latest' (default) always gets the latest. + See https://uploader.codecov.io/linux for the versions available. + required: false + default: latest + fail-on-error: + description: | + If 'true' then codecov will be called with '-Z', which will fail the build on an error. + Any value other than 'true' will be treated as false. 
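The placeholder syntax described in the input descriptions above is turned into a regular expression at run time: each {...} placeholder becomes a non-greedy capture group, and the % inside the placeholder is replaced by whatever that group captured. The snippet below is a minimal standalone sketch of that conversion, mirroring the get_flags helper in process.py further down; the function name flags_from_pattern is hypothetical, and the file name is taken from the worked example in the description.

import re

def flags_from_pattern(pattern, name):
    # '{...}' placeholders become non-greedy capture groups; '%' in each
    # placeholder label is replaced by whatever the group captured.
    placeholder = r'\{([^\}]+)\}'
    labels = re.findall(placeholder, pattern)
    regex = re.sub(placeholder, r'(.*?)', pattern)
    match = re.search(regex, name)
    if not match:
        return None
    return [label.replace('%', value) for label, value in zip(labels, match.groups())]

# Worked example from the file-flag-pattern description above:
print(flags_from_pattern(
    'coverage={%}={target_%}={env_%}=python-{py%}.xml',
    'coverage=integration=lookup_some_plugin=docker-default=python-3.9.xml',
))
# -> ['integration', 'target_lookup_some_plugin', 'env_docker-default', 'py3.9']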
+ required: false + default: 'true' +runs: + using: composite + steps: + - name: Download and verify codecov uploader + shell: bash + run: | + echo "::group::Installing codecov uploader" + + mkdir -p /tmp/ccbin + cd /tmp/ccbin + + echo "/tmp/ccbin" >> ${GITHUB_PATH} + + if command -v codecov ; then + ./codecov --version + else + curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --import # One-time step + curl -Os https://uploader.codecov.io/${{ inputs.codecov-uploader-version }}/linux/codecov + curl -Os https://uploader.codecov.io/${{ inputs.codecov-uploader-version }}/linux/codecov.SHA256SUM + curl -Os https://uploader.codecov.io/${{ inputs.codecov-uploader-version }}/linux/codecov.SHA256SUM.sig + gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM + shasum -a 256 -c codecov.SHA256SUM + chmod +x codecov + + ./codecov --version + fi + + echo "::endgroup::" + + - shell: bash + run: >- + python -u "${{ github.action_path }}/process.py" + --directory "${{ inputs.directory }}" + --directory-flag-pattern "${{ inputs.file-flag-pattern }}" + --file-flag-pattern "${{ inputs.file-flag-pattern }}" + --additional-flags "${{ inputs.additional-flags }}" + ${{ inputs.fail-on-error == 'true' && '--fail-on-error' || '' }} diff --git a/ansible_collections/community/hashi_vault/.github/actions/ansible-codecov/process.py b/ansible_collections/community/hashi_vault/.github/actions/ansible-codecov/process.py new file mode 100755 index 000000000..0836598e7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/.github/actions/ansible-codecov/process.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright: (c) 2021, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import sys +import subprocess +import re +import getopt +from pathlib import Path + + +def get_flags(pattern, input): + patpat = r'\{([^\}]+)\}' + + pats = re.findall(patpat, pattern) + matcher = re.sub(patpat, r'(.*?)', pattern) + match = re.search(matcher, input) + + if match: + return [pats[i].replace('%', result) for i, result in enumerate(match.groups())] + + return None + + +def main(argv): + additional_flags = file_flag_pattern = directory_flag_pattern = directory = fail_on_error = None + + opts, args = getopt.getopt(argv, '', [ + 'directory=', + 'directory-flag-pattern=', + 'file-flag-pattern=', + 'additional-flags=', + 'fail-on-error', + ]) + + for opt, arg in opts: + if opt == '--directory': + directory = arg + elif opt == '--directory-flag-pattern': + directory_flag_pattern = arg + elif opt == '--file-flag-pattern': + file_flag_pattern = arg + elif opt == '--additional-flags': + additional_flags = arg + elif opt == '--fail-on-error': + fail_on_error = True + + extra_flags = additional_flags.split(',') if additional_flags else [] + + flags = {} + + directory = Path(directory) if directory else Path.cwd() + + for f in directory.rglob('*'): + if f.is_file(): + iflags = set() + if directory_flag_pattern: + for part in f.parent.parts: + dflags = get_flags(directory_flag_pattern, part) + if dflags: + iflags.update(dflags) + + fflags = get_flags(file_flag_pattern, str(f.name)) + if fflags: + iflags.update(fflags) + + for flag in iflags: + flags.setdefault(flag, []).append(str(f.resolve())) + + logextra = ' (+%r)' % extra_flags if extra_flags else '' + + for flag, files in 
flags.items(): + cmd = ['codecov', '-F', flag] + [cmd.extend(['-F', extra]) for extra in extra_flags] + [cmd.extend(['-f', file]) for file in files] + if fail_on_error: + cmd.append('-Z') + + print('::group::Flag: %s%s' % (flag, logextra)) + + print('Executing: %r' % cmd) + subprocess.run(cmd, stderr=subprocess.STDOUT, check=True) + + print('::endgroup::') + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/ansible_collections/community/hashi_vault/.github/actions/collection-via-git/action.yml b/ansible_collections/community/hashi_vault/.github/actions/collection-via-git/action.yml new file mode 100644 index 000000000..43895af44 --- /dev/null +++ b/ansible_collections/community/hashi_vault/.github/actions/collection-via-git/action.yml @@ -0,0 +1,50 @@ +--- +name: Ansible Collection via GitHub +description: Install Ansible collections direct from GitHub repositories without using ansible-galaxy. +branding: + icon: git-branch + color: yellow +inputs: + collection: + description: The name of the collection in namespace.collection_name form. + required: true + ref: + description: The git ref to install. Defaults to the latest release as listed in GitHub releases. Only supports branches and tags. + required: false + path: + description: The path to clone it to. Defaults to ansible_collections/namespace/collection_name. + required: false +runs: + using: composite + steps: + - shell: bash + run: | + COLLECTION="${{ inputs.collection }}" + P_PATH="${{ inputs.path }}" + P_REF="${{ inputs.ref }}" + + NS="${COLLECTION%.*}" + CN="${COLLECTION#*.}" + + # only collections in the ansible-collections organization are supported right now + URLBASE="https://github.com/ansible-collections/${COLLECTION}" + URLCLONE="${URLBASE}.git" + URLLATEST="${URLBASE}/releases/latest" + + if [[ -n "${P_PATH}" ]] + then + OUTPATH="${P_PATH}" + else + OUTPATH="ansible_collections/${NS}/${CN}" + fi + + if [[ -n "${P_REF}" ]] + then + REF="${P_REF}" + else + # credit to https://gist.github.com/lukechilds/a83e1d7127b78fef38c2914c4ececc3c#gistcomment-3294173 + latest=$(curl -fs -o/dev/null -w %{redirect_url} "${URLLATEST}") + REF=$(basename ${latest}) + fi + + git clone --depth=1 --branch "${REF}" "${URLCLONE}" "${OUTPATH}" diff --git a/ansible_collections/community/hashi_vault/.github/actions/docker-image-versions/action.yml b/ansible_collections/community/hashi_vault/.github/actions/docker-image-versions/action.yml new file mode 100644 index 000000000..766067df7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/.github/actions/docker-image-versions/action.yml @@ -0,0 +1,47 @@ +--- +name: Get a list of docker image versions +description: Gets a list of docker image versions (via tags), limited to a specified number of major, minor, and micro combinations. +outputs: + versions: + description: JSON encoded list of versions. + value: ${{ steps.versions.outputs.versions }} +inputs: + image: + description: The docker image name. + required: false + default: vault + num_major_versions: + description: Number of unique major versions to return. + required: false + default: '1' + num_minor_versions: + description: Number of unique minor versions to return. + required: false + default: '1' + num_micro_versions: + description: Number of unique micro versions to return. + required: false + default: '1' + include_prerelease: + description: If 'true' then pre-release versions are included. Any value other than 'true' will be treated as false. 
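The collection-via-git action above resolves its default ref by requesting the repository's /releases/latest URL, reading the redirect target, and taking the last path segment as the tag name. A minimal Python sketch of the same idea, assuming requests is available; latest_release_tag is a hypothetical helper name, and the repository used for the call is one the CI workflow below actually installs this way.

import requests

def latest_release_tag(owner_repo):
    # GitHub answers .../releases/latest with a 302 to .../releases/tag/<tag>;
    # read the Location header instead of following the redirect.
    url = 'https://github.com/%s/releases/latest' % owner_repo
    resp = requests.get(url, allow_redirects=False)
    resp.raise_for_status()  # 3xx does not raise; this only catches hard failures
    return resp.headers['Location'].rsplit('/', 1)[-1]

print(latest_release_tag('ansible-collections/community.crypto'))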
+ required: false + include_postrelease: + description: If 'true' then post-release versions are included. Any value other than 'true' will be treated as false. + required: false +runs: + using: composite + steps: + - name: Requirements + shell: bash + run: pip install -r "${{ github.action_path }}/requirements.txt" + + - shell: bash + id: versions + run: >- + python -u "${{ github.action_path }}/versions.py" + --image "${{ inputs.image }}" + --num_major_versions "${{ inputs.num_major_versions }}" + --num_minor_versions "${{ inputs.num_minor_versions }}" + --num_micro_versions "${{ inputs.num_micro_versions }}" + ${{ inputs.include_prerelease == 'true' && '--include_prerelease' || '' }} + ${{ inputs.include_postrelease == 'true' && '--include_postrelease' || '' }} diff --git a/ansible_collections/community/hashi_vault/.github/actions/docker-image-versions/requirements.txt b/ansible_collections/community/hashi_vault/.github/actions/docker-image-versions/requirements.txt new file mode 100644 index 000000000..c2042f5e4 --- /dev/null +++ b/ansible_collections/community/hashi_vault/.github/actions/docker-image-versions/requirements.txt @@ -0,0 +1,3 @@ +requests +packaging +urllib3 >= 1.15 diff --git a/ansible_collections/community/hashi_vault/.github/actions/docker-image-versions/versions.py b/ansible_collections/community/hashi_vault/.github/actions/docker-image-versions/versions.py new file mode 100755 index 000000000..9d7fcea2d --- /dev/null +++ b/ansible_collections/community/hashi_vault/.github/actions/docker-image-versions/versions.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright: (c) 2021, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys +import getopt + +import json + +import requests +from urllib3.util.retry import Retry +from requests.adapters import HTTPAdapter +from warnings import warn +from packaging import version + + +TAG_URI = 'https://registry.hub.docker.com/v2/repositories/library/%s/tags?page_size=1024' + + +class WarningRetry(Retry): + def new(self, **kwargs): + if self.total > 0: + warn('Error on request. 
Retries remaining: %i' % (self.total,)) + return super().new(**kwargs) + + +def main(argv): + image = None + include_prerelease = include_postrelease = False + num_major_versions = 1 + num_minor_versions = 3 + num_micro_versions = 1 + + opts, args = getopt.getopt(argv, '', [ + 'image=', + 'num_major_versions=', + 'num_minor_versions=', + 'num_micro_versions=', + 'include_prerelease', + 'include_postrelease', + ]) + + for opt, arg in opts: + if opt == '--image': + image = arg + elif opt == '--num_major_versions': + num_major_versions = int(arg) + elif opt == '--num_minor_versions': + num_minor_versions = int(arg) + elif opt == '--num_micro_versions': + num_micro_versions = int(arg) + elif opt == '--include_prerelease': + include_prerelease = True + elif opt == '--include_postrelease': + include_postrelease = True + + if image is None: + raise ValueError('image must be supplied.') + + tag_url = TAG_URI % image + + sess = requests.Session() + retry = WarningRetry(total=5, backoff_factor=0.2, respect_retry_after_header=False) + adapter = HTTPAdapter(max_retries=retry) + sess.mount('https://', adapter) + + response = sess.get(tag_url) + + versions = [] + for tag in response.json()['results']: + vobj = None + try: + vobj = version.parse(tag['name']) + except Exception: + continue + else: + if not isinstance(vobj, version.Version): + continue + + if vobj.is_prerelease is include_prerelease and vobj.is_postrelease is include_postrelease: + versions.append(vobj) + + majors = set() + minors = set() + micros = set() + keep = [] + for ver in sorted(versions, reverse=True): + if ver.major not in majors: + if len(majors) == num_major_versions: + break + majors.add(ver.major) + minors.clear() + micros.clear() + + if ver.minor not in minors: + if len(minors) == num_minor_versions: + continue + minors.add(ver.minor) + micros.clear() + + if ver.micro not in micros: + if len(micros) == num_micro_versions: + continue + micros.add(ver.micro) + + keep.append(str(ver)) + + with open(os.environ['GITHUB_OUTPUT'], 'a') as f: + f.write('versions=') + json.dump(keep, f) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/ansible_collections/community/hashi_vault/.github/actions/pull-ansible-test-images/action.yml b/ansible_collections/community/hashi_vault/.github/actions/pull-ansible-test-images/action.yml new file mode 100644 index 000000000..07fd051f6 --- /dev/null +++ b/ansible_collections/community/hashi_vault/.github/actions/pull-ansible-test-images/action.yml @@ -0,0 +1,52 @@ +--- +name: Pull ansible-test docker images +description: Performs a docker pull against ansible-test docker image aliases +inputs: + working-directory: + description: The working directory to operate under. This should be the collection's directory. + required: false + ansible-test-invocation: + description: The options that will be passed to ansible-test. 
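The version-trimming loop in versions.py above walks the tags newest-first and caps how many distinct major, minor, and micro values survive, clearing the inner counters whenever an outer component changes. A compact sketch of that same selection with a made-up tag list (packaging assumed available; trim is a hypothetical name), using the num_major_versions=1 / num_minor_versions=2 / num_micro_versions=1 combination the integration workflow further down requests:

from packaging import version

def trim(tags, num_major=1, num_minor=2, num_micro=1):
    majors, minors, micros, keep = set(), set(), set(), []
    for ver in sorted((version.parse(t) for t in tags), reverse=True):
        if ver.major not in majors:
            if len(majors) == num_major:
                break
            majors.add(ver.major)
            minors.clear()
            micros.clear()
        if ver.minor not in minors:
            if len(minors) == num_minor:
                continue
            minors.add(ver.minor)
            micros.clear()
        if ver.micro not in micros:
            if len(micros) == num_micro:
                continue
            micros.add(ver.micro)
        keep.append(str(ver))
    return keep

# Hypothetical tag list; only the newest micro of the two newest minors
# of the single newest major survives.
print(trim(['1.13.2', '1.13.1', '1.12.4', '1.12.3', '1.11.0', '0.9.1']))
# -> ['1.13.2', '1.12.4']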
+ required: true +runs: + using: composite + steps: + - shell: bash + run: pip install packaging + + - shell: python + run: | + import os + import sys + from packaging import version + from ansible.release import __version__ as ansible_version + + ver = version.parse(ansible_version) + cutoff = version.parse('2.12') + + nwd = r'${{ inputs.working-directory }}' + if nwd: + os.chdir(nwd) + + # not using ver >= cutoff because of pre-release/dev comparison logic + if ver.major > cutoff.major or (ver.major == cutoff.major and ver.minor >= cutoff.minor): + invo = r'${{ inputs.ansible-test-invocation }}' + sys.exit(os.system('ansible-test %s --prime-containers' % invo)) + + try: + from ansible_test._internal.util_common import get_docker_completion + except ImportError: + # 2.9 + from ansible_test._internal.util import get_docker_completion + + context = 'collection' + wanted = ['default'] + + dockers = get_docker_completion() + + for alias, data in dockers.items(): + if alias in wanted: + if 'context' not in data or data['context'] == context: + image = data['name'] + print('pulling %s' % image) + os.system('docker pull %s' % image) diff --git a/ansible_collections/community/hashi_vault/.github/workflows/ansible-builder.yml b/ansible_collections/community/hashi_vault/.github/workflows/ansible-builder.yml new file mode 100644 index 000000000..10aeb5c2d --- /dev/null +++ b/ansible_collections/community/hashi_vault/.github/workflows/ansible-builder.yml @@ -0,0 +1,42 @@ +--- +name: ansible-builder +on: + push: + paths: + - '.github/workflows/ansible-builder.yml' + - 'meta/execution-environment.yml' + - 'meta/ee-requirements.txt' + pull_request: + paths: + - '.github/workflows/ansible-builder.yml' + - 'meta/execution-environment.yml' + - 'meta/ee-requirements.txt' + schedule: + - cron: '0 13 * * *' + +env: + NAMESPACE: community + COLLECTION_NAME: hashi_vault + +jobs: + builder: + name: ansible-builder requirements + runs-on: ubuntu-latest + steps: + - name: Check out code + uses: actions/checkout@v3 + with: + path: ansible_collections/${{ env.NAMESPACE }}/${{ env.COLLECTION_NAME }} + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + + - name: Install ansible-builder + run: pip install ansible-builder + + # this is kind of a naive check, since we aren't comparing the output to anything to verify + # so the only we'll catch with this is an egregious error that causes builder to exit nonzero + - name: Verify Requirements + run: ansible-builder introspect --sanitize . 
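As the workflow comment above notes, the introspect step only catches errors that make ansible-builder exit nonzero; the output itself is not compared to anything. A hedged sketch of a slightly stronger check, assuming introspect emits a YAML document with top-level python/system requirement lists (verify against your ansible-builder version) and that hvac is expected among the collection's EE python requirements:

import subprocess
import yaml

# Run the same introspection the workflow runs, but capture the report.
result = subprocess.run(
    ['ansible-builder', 'introspect', '--sanitize', '.'],
    capture_output=True, text=True, check=True,
)

# Assumption: the report parses as YAML with 'python' and 'system' keys.
data = yaml.safe_load(result.stdout)
python_reqs = data.get('python') or []

# The collection's execution environment should pull in hvac.
assert any('hvac' in str(req) for req in python_reqs), 'hvac missing from EE requirements'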
diff --git a/ansible_collections/community/hashi_vault/.github/workflows/ansible-test.yml b/ansible_collections/community/hashi_vault/.github/workflows/ansible-test.yml new file mode 100644 index 000000000..25857dadb --- /dev/null +++ b/ansible_collections/community/hashi_vault/.github/workflows/ansible-test.yml @@ -0,0 +1,463 @@ +name: CI +on: + # Run CI against all pushes (direct commits, also merged PRs), Pull Requests + push: + paths-ignore: + - 'docs/**' + - '.github/workflows/_shared-*' + - '.github/workflows/docs*.yml' + - '.github/actions/docs/**' + pull_request: + paths-ignore: + - 'docs/**' + - '.github/workflows/_shared-*' + - '.github/workflows/docs*.yml' + - '.github/actions/docs/**' + schedule: + - cron: '0 14 * * *' +env: + NAMESPACE: community + COLLECTION_NAME: hashi_vault + ANSIBLE_FORCE_COLOR: true + ANSIBLE_COLLECTIONS_PATHS: ${{ github.workspace }} + +jobs: + +### +# Sanity tests (REQUIRED) +# https://docs.ansible.com/ansible/latest/dev_guide/testing_sanity.html + + sanity: + name: Sanity (Ⓐ${{ matrix.ansible }}) + runs-on: ${{ matrix.runner }} + strategy: + matrix: + runner: + - ubuntu-latest + test_container: + - default + ansible: + - stable-2.11 + - stable-2.12 + - stable-2.13 + - stable-2.14 + - stable-2.15 + - devel + steps: + + # ansible-test requires the collection to be in a directory in the form + # .../ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}/ + - name: Initialize env vars + uses: briantist/ezenv@v1 + with: + env: | + COLLECTION_PATH=ansible_collections/${NAMESPACE}/${COLLECTION_NAME} + TEST_INVOCATION="sanity --docker ${{ matrix.test_container }} -v --color ${{ github.event_name != 'schedule' && '--coverage' || '' }}" + + - name: Check out code + uses: actions/checkout@v3 + with: + path: ${{ env.COLLECTION_PATH }} + + - name: Link to .github # easier access to local actions + run: ln -s "${COLLECTION_PATH}/.github" .github + + - name: Set up Python + uses: actions/setup-python@v4 + with: + # it is just required to run that once as "ansible-test sanity" in the docker image + # will run on all python versions it supports. + python-version: 3.9 + + # Install the head of the given branch (devel, stable-2.14) + - name: Install ansible-base (${{ matrix.ansible }}) + run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check + + - name: Pull Ansible test images + timeout-minutes: 5 + continue-on-error: true + uses: ./.github/actions/pull-ansible-test-images + with: + working-directory: ${{ env.COLLECTION_PATH }} + ansible-test-invocation: ${{ env.TEST_INVOCATION }} + + # run ansible-test sanity inside of Docker. + # The docker container has all the pinned dependencies that are required + # and all python versions ansible supports. 
+ - name: Run sanity tests + run: ansible-test ${{ env.TEST_INVOCATION }} + working-directory: ${{ env.COLLECTION_PATH }} + + - name: Generate coverage report + if: ${{ github.event_name != 'schedule' }} + run: ansible-test coverage xml -v --requirements --group-by command --group-by environment --group-by target + working-directory: ${{ env.COLLECTION_PATH }} + + - name: Upload ${{ github.job }} coverage reports + if: ${{ github.event_name != 'schedule' }} + uses: actions/upload-artifact@v3 + with: + name: coverage=${{ github.job }}=ansible_${{ matrix.ansible }}=data + path: ${{ env.COLLECTION_PATH }}/tests/output/reports/ + if-no-files-found: error + retention-days: 1 + + + units: + runs-on: ${{ matrix.runner }} + name: Units (Ⓐ${{ matrix.ansible }}) + strategy: + # As soon as the first unit test fails, cancel the others to free up the CI queue + fail-fast: true + matrix: + runner: + - ubuntu-latest + test_container: + - default + ansible: + - stable-2.11 + - stable-2.12 + - stable-2.13 + - stable-2.14 + - stable-2.15 + - devel + + steps: + - name: Initialize env vars + uses: briantist/ezenv@v1 + with: + env: | + COLLECTION_PATH=ansible_collections/${NAMESPACE}/${COLLECTION_NAME} + TEST_INVOCATION="units --color --docker ${{ matrix.test_container }} ${{ github.event_name != 'schedule' && '--coverage' || '' }}" + + - name: Check out code + uses: actions/checkout@v3 + with: + path: ${{ env.COLLECTION_PATH }} + + - name: Link to .github # easier access to local actions + run: ln -s "${COLLECTION_PATH}/.github" .github + + - name: Set up Python + uses: actions/setup-python@v4 + with: + # it is just required to run that once as "ansible-test units" in the docker image + # will run on all python versions it supports. + python-version: 3.9 + + - name: Install ansible-base (${{ matrix.ansible }}) + run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check + + - name: Pull Ansible test images + timeout-minutes: 5 + continue-on-error: true + uses: ./.github/actions/pull-ansible-test-images + with: + working-directory: ${{ env.COLLECTION_PATH }} + ansible-test-invocation: ${{ env.TEST_INVOCATION }} + + # Run the unit tests + - name: Run unit test + run: ansible-test ${{ env.TEST_INVOCATION }} + working-directory: ${{ env.COLLECTION_PATH }} + + - name: Generate coverage report + if: ${{ github.event_name != 'schedule' }} + run: ansible-test coverage xml -v --requirements --group-by command --group-by environment --group-by target + working-directory: ${{ env.COLLECTION_PATH }} + + - name: Upload ${{ github.job }} coverage reports + if: ${{ github.event_name != 'schedule' }} + uses: actions/upload-artifact@v3 + with: + name: coverage=${{ github.job }}=ansible_${{ matrix.ansible }}=data + path: ${{ env.COLLECTION_PATH }}/tests/output/reports/ + if-no-files-found: error + retention-days: 1 + +### +# Integration tests (RECOMMENDED) +# +# https://docs.ansible.com/ansible/latest/dev_guide/testing_integration.html + + integration: + runs-on: ${{ matrix.runner }} + name: I (Ⓐ${{ matrix.ansible }}+py${{ matrix.python }}) + strategy: + fail-fast: false + matrix: + runner: + - ubuntu-latest + test_container: + - default + ansible: + - stable-2.11 + - stable-2.12 + - stable-2.13 + - stable-2.14 + - stable-2.15 + - devel + python: + - '3.6' + - '3.7' + - '3.8' + - '3.9' + - '3.10' + - '3.11' + exclude: + # https://docs.ansible.com/ansible/devel/installation_guide/intro_installation.html#control-node-requirements + # 
https://docs.ansible.com/ansible/devel/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix + - ansible: 'stable-2.11' + python: '3.10' + - ansible: 'stable-2.11' + python: '3.11' + - ansible: 'stable-2.12' + python: '3.11' + - ansible: 'stable-2.13' + python: '3.11' + - ansible: 'stable-2.15' + python: '3.6' + - ansible: 'stable-2.15' + python: '3.7' + - ansible: 'stable-2.15' + python: '3.8' + - ansible: 'devel' + python: '3.6' + - ansible: 'devel' + python: '3.7' + - ansible: 'devel' + python: '3.8' + + steps: + - name: Initialize env vars + uses: briantist/ezenv@v1 + with: + env: | + COLLECTION_PATH=ansible_collections/${NAMESPACE}/${COLLECTION_NAME} + COLLECTION_INTEGRATION_PATH=${COLLECTION_PATH}/tests/integration + COLLECTION_INTEGRATION_TARGETS=${COLLECTION_INTEGRATION_PATH}/targets + TEST_INVOCATION="integration -v --color --retry-on-error --continue-on-error --python ${{ matrix.python }} --docker ${{ matrix.test_container }} ${{ github.event_name != 'schedule' && '--coverage' || '' }} --docker-network hashi_vault_default" + + - name: Check out code + uses: actions/checkout@v3 + with: + path: ${{ env.COLLECTION_PATH }} + + - name: Link to .github # easier access to local actions + run: ln -s "${COLLECTION_PATH}/.github" .github + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + + - name: Get Vault versions + id: vault_versions + uses: ./.github/actions/docker-image-versions + with: + num_major_versions: 1 + num_minor_versions: 2 + num_micro_versions: 1 + + - name: Install ansible-base (${{ matrix.ansible }}) + run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check + + - name: Pull Ansible test images + timeout-minutes: 5 + continue-on-error: true + uses: ./.github/actions/pull-ansible-test-images + with: + working-directory: ${{ env.COLLECTION_PATH }} + ansible-test-invocation: ${{ env.TEST_INVOCATION }} + + - name: Set Vault Version (older) + uses: briantist/ezenv@v1 + with: + env: VAULT_VERSION=${{ fromJSON(steps.vault_versions.outputs.versions)[1] }} + + - name: Prepare docker dependencies (Vault ${{ env.VAULT_VERSION }}) + run: ./setup.sh -e vault_version=${VAULT_VERSION} + working-directory: ${{ env.COLLECTION_INTEGRATION_TARGETS }}/setup_localenv_gha + + - name: Run integration test (Vault ${{ env.VAULT_VERSION }}) + run: ansible-test ${{ env.TEST_INVOCATION }} + working-directory: ${{ env.COLLECTION_PATH }} + + - name: Set Vault Version (newer) + uses: briantist/ezenv@v1 + with: + env: VAULT_VERSION=${{ fromJSON(steps.vault_versions.outputs.versions)[0] }} + + - name: Prepare docker dependencies (Vault ${{ env.VAULT_VERSION }}) + run: ./setup.sh -e vault_version=${VAULT_VERSION} + working-directory: ${{ env.COLLECTION_INTEGRATION_TARGETS }}/setup_localenv_gha + + - name: Run integration test (Vault ${{ env.VAULT_VERSION }}) + run: ansible-test ${{ env.TEST_INVOCATION }} + working-directory: ${{ env.COLLECTION_PATH }} + + # ansible-test support producing code coverage data + - name: Generate coverage report + if: ${{ github.event_name != 'schedule' }} + run: ansible-test coverage xml -v --requirements --group-by command --group-by environment --group-by target + working-directory: ${{ env.COLLECTION_PATH }} + + - name: Upload ${{ github.job }} coverage reports + if: ${{ github.event_name != 'schedule' }} + uses: actions/upload-artifact@v3 + with: + name: coverage=${{ github.job }}=ansible_${{ matrix.ansible }}=${{ matrix.python }}=data + path: ${{ 
env.COLLECTION_PATH }}/tests/output/reports/ + if-no-files-found: error + retention-days: 1 + + local_test_invocation: + runs-on: ${{ matrix.runner }} + name: LI - ${{ matrix.runner }} (Ⓐ${{ matrix.ansible }}+py${{ matrix.python }}) + strategy: + fail-fast: false + matrix: + ansible: + - stable-2.15 + - devel + python: + - 3.9 + runner: + - ubuntu-latest + test_container: + - default + + steps: + - name: Initialize env vars + uses: briantist/ezenv@v1 + with: + env: | + COLLECTION_PATH=ansible_collections/${NAMESPACE}/${COLLECTION_NAME} + COLLECTION_INTEGRATION_PATH=${COLLECTION_PATH}/tests/integration + COLLECTION_INTEGRATION_TARGETS=${COLLECTION_INTEGRATION_PATH}/targets + DOCKER_TEST_INVOCATION="integration -v --color --retry-on-error --continue-on-error --controller docker:${{ matrix.test_container }},python=${{ matrix.python }} ${{ github.event_name != 'schedule' && '--coverage' || '' }}" + + - name: Check out code + uses: actions/checkout@v3 + with: + path: ${{ env.COLLECTION_PATH }} + + - name: Link to .github # easier access to local actions + run: ln -s "${COLLECTION_PATH}/.github" .github + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} + + - name: Install ansible-base (${{ matrix.ansible }}) + run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check + + - name: Install community.crypto + uses: ./.github/actions/collection-via-git + with: + collection: community.crypto + + - name: Install community.docker + uses: ./.github/actions/collection-via-git + with: + collection: community.docker + + - name: Pull Ansible test images + timeout-minutes: 5 + continue-on-error: true + uses: ./.github/actions/pull-ansible-test-images + with: + working-directory: ${{ env.COLLECTION_PATH }} + ansible-test-invocation: ${{ env.DOCKER_TEST_INVOCATION }} + + - name: localenv_docker - setup + run: | + pwd + pip install -r files/requirements/requirements.txt -c files/requirements/constraints.txt + ./setup.sh + working-directory: ${{ env.COLLECTION_INTEGRATION_TARGETS }}/setup_localenv_docker + + - name: localenv_docker - Run integration test (in docker) + run: | + ansible-test ${{ env.DOCKER_TEST_INVOCATION }} --docker-network hashi_vault_default + working-directory: ${{ env.COLLECTION_PATH }} + + - name: Run integration again (ensure tests do not break against still-running containers) + run: | + ansible-test ${{ env.DOCKER_TEST_INVOCATION }} --docker-network hashi_vault_default + working-directory: ${{ env.COLLECTION_PATH }} + + #TODO add capability in the Ansible side once vault_list and vault_delete exist + - name: Run a third time, but delete Vault's cubbyhole contents first + working-directory: ${{ env.COLLECTION_PATH }} + env: + VAULT_TOKEN: 47542cbc-6bf8-4fba-8eda-02e0a0d29a0a + VAULT_ADDR: http://vault:8200 + run: | + echo 'vault list cubbyhole \ + | tail -n +3 \ + | xargs -I{} -n 1 vault delete cubbyhole/{}' \ + | docker run --rm --network hashi_vault_default -e VAULT_TOKEN -e VAULT_ADDR -i vault sh + + ansible-test ${{ env.DOCKER_TEST_INVOCATION }} --docker-network hashi_vault_default + + # ansible-test support producing code coverage data + - name: Generate coverage report + if: ${{ github.event_name != 'schedule' }} + run: ansible-test coverage xml -v --requirements --group-by command --group-by environment --group-by target + working-directory: ${{ env.COLLECTION_PATH }} + + - name: Upload ${{ github.job }} coverage reports + if: ${{ github.event_name != 'schedule' }} + 
uses: actions/upload-artifact@v3 + with: + name: coverage=${{ github.job }}=${{ matrix.runner }}=ansible_${{ matrix.ansible }}=${{ matrix.python }}=data + path: ${{ env.COLLECTION_PATH }}/tests/output/reports/ + if-no-files-found: error + retention-days: 1 + + upload-coverage: + needs: + - sanity + - units + - integration + - local_test_invocation + # don't upload coverage on scheduled runs + # https://github.com/ansible-collections/community.hashi_vault/issues/180 + if: ${{ github.event_name != 'schedule' }} + name: Upload Codecov reports + runs-on: ubuntu-latest + steps: + - name: Check out code + uses: actions/checkout@v3 + + - name: Download artifacts + uses: actions/download-artifact@v3 + with: + path: ./cov + + # Before Ansible 2.12, units always used a "target" of "units", and we don't want a flag of "target_units". + # After 2.12, target can be "controller" or "module_utils" and we'll preserve them for now. + # If we decide that those targets are not helpful, we can simplify processing by removing this run block + # and just having two ansible-codecov calls, one for units (that excludes target) and one for integration. + # That change would also make integration processing faster because we can hardcode the integration flag as an additional flag. + - name: Move Ansible pre-2.12 units + run: | + mkdir ./cov-units-pre2.12 + mv ./cov/coverage=units=ansible_stable-2.11=data ./cov-units-pre2.12 + + - name: Upload Ansible pre-2.12 unit coverage reports to Codecov + uses: ./.github/actions/ansible-codecov + with: + directory: ./cov-units-pre2.12 + additional-flags: units + file-flag-pattern: coverage=units=units={env_%}.xml + directory-flag-pattern: =ansible_{ansible-%}= + + # See the reports at https://codecov.io/gh/ansible-collections/community.hashi_vault + - name: Upload coverage reports to Codecov + uses: ./.github/actions/ansible-codecov + with: + directory: ./cov + directory-flag-pattern: =ansible_{ansible-%}= diff --git a/ansible_collections/community/hashi_vault/.github/workflows/docs-push.yml b/ansible_collections/community/hashi_vault/.github/workflows/docs-push.yml new file mode 100644 index 000000000..82a989507 --- /dev/null +++ b/ansible_collections/community/hashi_vault/.github/workflows/docs-push.yml @@ -0,0 +1,61 @@ +name: Collection Docs +concurrency: + group: docs-push-${{ github.sha }} + cancel-in-progress: true +on: + push: + branches: + - main + tags: + - '*' + schedule: + - cron: '0 13 * * *' + +jobs: + validate-docs: + permissions: + contents: read + name: Validate Ansible Docs + uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-push.yml@main + with: + init-lenient: false + init-fail-on-error: true + artifact-upload: false + + build-docs: + permissions: + contents: read + name: Build Ansible Docs + uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-push.yml@main + with: + init-dest-dir: docs/preview + # Although we want this to be the most strict, we can't currently achieve this + # with the committed init-dest-dir, hence the validate-docs job, which will + # prevent publish from running in the case of failures. 
+ + publish-docs-surge: + # for now we won't run this on forks + if: github.repository == 'ansible-collections/community.hashi_vault' + permissions: + contents: read + needs: [validate-docs, build-docs] + name: Publish Ansible Docs + uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-publish-surge.yml@main + with: + artifact-name: ${{ needs.build-docs.outputs.artifact-name }} + surge-site-name: community-hashi-vault-main.surge.sh + secrets: + SURGE_TOKEN: ${{ secrets.SURGE_TOKEN }} + + publish-docs-gh-pages: + # for now we won't run this on forks + if: github.repository == 'ansible-collections/community.hashi_vault' + permissions: + contents: write + needs: [validate-docs, build-docs] + name: Publish Ansible Docs + uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-publish-gh-pages.yml@main + with: + artifact-name: ${{ needs.build-docs.outputs.artifact-name }} + secrets: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/ansible_collections/community/hashi_vault/.github/workflows/docs.yml b/ansible_collections/community/hashi_vault/.github/workflows/docs.yml new file mode 100644 index 000000000..7d66821bb --- /dev/null +++ b/ansible_collections/community/hashi_vault/.github/workflows/docs.yml @@ -0,0 +1,117 @@ +name: Collection Docs +concurrency: + group: docs-pr-${{ github.head_ref }} + cancel-in-progress: true +on: + pull_request_target: + types: [opened, synchronize, reopened, closed] + +env: + SURGE_PR_SITE: community-hashi-vault-pr${{ github.event.number }}.surge.sh + SURGE_MAIN_SITE: community-hashi-vault-main.surge.sh + GHP_BASE_URL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }} + +jobs: + # this job builds with the most strict options to ensure full compliance + # does not use the collection's committed sphinx-init output + validate-docs: + permissions: + contents: read + name: Validate Ansible Docs + if: github.event.action != 'closed' + uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-push.yml@main + with: + artifact-upload: false + init-lenient: false + init-fail-on-error: true + build-ref: refs/pull/${{ github.event.number }}/merge + + # this job builds for the PR comparison and publish, so use the most lenient options + # to give the best possibility of producing a publishable build; the strict build will + # still result in a failure for the PR as a whole, but for review a partial docsite is + # better than none. + # This uses the committed sphinx-init output which already has the lenient options. 
+ build-docs: + permissions: + contents: read + name: Build Ansible Docs + uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-pr.yml@main + with: + init-dest-dir: docs/preview + render-file-line: '> * `$<status>` [$<path_tail>](https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/pr/${{ github.event.number }}/$<path_tail>)' + + publish-docs-surge: + # for now we won't run this on forks + if: github.repository == 'ansible-collections/community.hashi_vault' + permissions: + contents: read + needs: [build-docs] + name: Publish Ansible Docs + uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-publish-surge.yml@main + with: + artifact-name: ${{ needs.build-docs.outputs.artifact-name }} + surge-site-name: community-hashi-vault-pr${{ github.event.number }}.surge.sh + action: ${{ (github.event.action == 'closed' || needs.build-docs.outputs.changed != 'true') && 'teardown' || 'publish' }} + secrets: + SURGE_TOKEN: ${{ secrets.SURGE_TOKEN }} + + publish-docs-gh-pages: + # for now we won't run this on forks + if: github.repository == 'ansible-collections/community.hashi_vault' + permissions: + contents: write + needs: [build-docs] + name: Publish Ansible Docs + uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-publish-gh-pages.yml@main + with: + artifact-name: ${{ needs.build-docs.outputs.artifact-name }} + action: ${{ (github.event.action == 'closed' || needs.build-docs.outputs.changed != 'true') && 'teardown' || 'publish' }} + secrets: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + comment: + permissions: + pull-requests: write + runs-on: ubuntu-latest + needs: [build-docs, publish-docs-gh-pages] + name: PR comments + steps: + - name: PR comment + env: + PR_SITE_URL: https://${{ env.SURGE_PR_SITE }} + MAIN_SITE_URL: https://${{ env.SURGE_MAIN_SITE }} + uses: ansible-community/github-docs-build/actions/ansible-docs-build-comment@main + with: + body-includes: '## Docs Build' + reactions: heart + action: ${{ needs.build-docs.outputs.changed != 'true' && 'remove' || '' }} + on-closed-body: | + ## Docs Build 📝 + + This PR is closed and any previously published docsite has been unpublished. 
+ on-merged-body: | + ## Docs Build 📝 + + Thank you for contribution!✨ + + This PR has been merged and the docs are now incorporated into `main`: + ${{ env.GHP_BASE_URL }}/branch/main + body: | + ## Docs Build 📝 + + Thank you for contribution!✨ + + The docs for **this PR** have been published here: + ${{ env.GHP_BASE_URL }}/pr/${{ github.event.number }} + + You can compare to the docs for the `main` branch here: + ${{ env.GHP_BASE_URL }}/branch/main + + The docsite for **this PR** is also available for download as an artifact from this run: + ${{ needs.build-docs.outputs.artifact-url }} + + File changes: + + ${{ needs.build-docs.outputs.diff-files-rendered }} + + ${{ needs.build-docs.outputs.diff-rendered }} diff --git a/ansible_collections/community/hashi_vault/.github/workflows/github-release.yml b/ansible_collections/community/hashi_vault/.github/workflows/github-release.yml new file mode 100644 index 000000000..6fce7d2e6 --- /dev/null +++ b/ansible_collections/community/hashi_vault/.github/workflows/github-release.yml @@ -0,0 +1,71 @@ +name: GitHub Release +on: + workflow_dispatch: + inputs: + version: + description: 'Version number to release' + required: true + +env: + GHP_BASE_URL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }} + +jobs: + release: + name: Create GitHub Release + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + + - name: Install PyYaml + run: pip install pyyaml + + - name: Validate version is published to Galaxy + run: curl --head -s -f -o /dev/null https://galaxy.ansible.com/download/community-hashi_vault-${{ github.event.inputs.version }}.tar.gz + + - name: Build release description + shell: python + run: | + import os + import yaml + + ver = '${{ github.event.inputs.version }}' + ver_anchor = str.replace(ver, '.', '-') + + with open('changelogs/changelog.yaml', 'r') as s: + ri = yaml.safe_load(s) + + summary = ri['releases'][ver]['changes']['release_summary'] + reldate = ri['releases'][ver]['release_date'] + + description = '''## Summary + Released: %s + + %s + + --- + + View the [complete changelog](https://github.com/ansible-collections/community.hashi_vault/blob/main/CHANGELOG.rst#v%s) to see all changes. + + View the [full documentation for release ${{ github.event.inputs.version }}](${{ env.GHP_BASE_URL }}/tag/${{ github.event.inputs.version }}). + ''' % (reldate, summary, ver_anchor) + + with open(os.environ['GITHUB_ENV'], 'a') as e: + e.write("RELEASE_DESCRIPTION<<EOF\n%s\nEOF" % description) + + - name: Create Release + id: create_release + # TODO: this action is no longer maintained, replace + # likely candidate: https://github.com/softprops/action-gh-release + uses: actions/create-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: ${{ github.event.inputs.version }} + release_name: ${{ github.event.inputs.version }} + body: ${{ env.RELEASE_DESCRIPTION }} diff --git a/ansible_collections/community/hashi_vault/CHANGELOG.rst b/ansible_collections/community/hashi_vault/CHANGELOG.rst new file mode 100644 index 000000000..5223d4a97 --- /dev/null +++ b/ansible_collections/community/hashi_vault/CHANGELOG.rst @@ -0,0 +1,604 @@ +=================================== +community.hashi_vault Release Notes +=================================== + +.. 
contents:: Topics + + +v4.2.1 +====== + +Release Summary +--------------- + +This patch version updates the documentation for the ``vault_kv2_write`` module. There are no functional changes. + +v4.2.0 +====== + +Release Summary +--------------- + +This release contains a new module for KVv2 writes, and a new warning for duplicated term string options in the ``hashi_vault`` lookup. + +Deprecated Features +------------------- + +- hashi_vault lookup - in ``v5.0.0`` duplicate term string options will raise an exception instead of showing a warning (https://github.com/ansible-collections/community.hashi_vault/issues/356). + +Bugfixes +-------- + +- hashi_vault lookup - a term string with duplicate options would silently use the last value. The lookup now shows a warning on option duplication (https://github.com/ansible-collections/community.hashi_vault/issues/349). + +New Modules +----------- + +- vault_kv2_write - Perform a write operation against a KVv2 secret in HashiCorp Vault + +v4.1.0 +====== + +Release Summary +--------------- + +This release brings new generic ``vault_list`` plugins from a new contributor! +There are also some deprecation notices for the next major version, and some updates to documentation attributes. + +Deprecated Features +------------------- + +- ansible-core - support for ``ansible-core`` versions ``2.11`` and ``2.12`` will be dropped in collection version ``5.0.0``, making ``2.13`` the minimum supported version of ``ansible-core`` (https://github.com/ansible-collections/community.hashi_vault/issues/340). +- hvac - the minimum version of ``hvac`` to be supported in collection version ``5.0.0`` will be at least ``1.0.2``; this minimum may be raised before ``5.0.0`` is released, so please subscribe to the linked issue and look out for new notices in the changelog (https://github.com/ansible-collections/community.hashi_vault/issues/324). + +New Plugins +----------- + +Lookup +~~~~~~ + +- vault_list - Perform a list operation against HashiCorp Vault + +New Modules +----------- + +- vault_list - Perform a list operation against HashiCorp Vault + +v4.0.0 +====== + +Release Summary +--------------- + +The next major version of the collection includes previously announced breaking changes to some default values, and improvements to module documentation with attributes that describe the use of action groups and check mode support. + +Minor Changes +------------- + +- modules - all modules now document their action group and support for check mode in their attributes documentation (https://github.com/ansible-collections/community.hashi_vault/issues/197). + +Breaking Changes / Porting Guide +-------------------------------- + +- auth - the default value for ``token_validate`` has changed from ``true`` to ``false``, as previously announced (https://github.com/ansible-collections/community.hashi_vault/issues/248). +- vault_kv2_get lookup - as previously announced, the default value for ``engine_mount_point`` in the ``vault_kv2_get`` lookup has changed from ``kv`` to ``secret`` (https://github.com/ansible-collections/community.hashi_vault/issues/279). + +v3.4.0 +====== + +Release Summary +--------------- + +This release includes a new module, fixes (another) ``requests`` header issue, and updates some inaccurate documentation. +This is the last planned release before v4.0.0. 
+ +Minor Changes +------------- + +- vault_pki_generate_certificate - the documentation has been updated to match the argspec for the default values of options ``alt_names``, ``ip_sans``, ``other_sans``, and ``uri_sans`` (https://github.com/ansible-collections/community.hashi_vault/pull/318). + +Bugfixes +-------- + +- connection options - the ``namespace`` connection option will be forced into a string to ensure cmpatibility with recent ``requests`` versions (https://github.com/ansible-collections/community.hashi_vault/issues/309). + +New Modules +----------- + +- vault_kv2_delete - Delete one or more versions of a secret from HashiCorp Vault's KV version 2 secret store + +v3.3.1 +====== + +Release Summary +--------------- + +No functional changes in this release, this provides updated filter documentation for the public docsite. + +v3.3.0 +====== + +Release Summary +--------------- + +With the release of ``hvac`` version ``1.0.0``, we needed to update ``vault_token_create``'s support for orphan tokens. +The collection's changelog is now viewable in the Ansible documentation site. + +Minor Changes +------------- + +- vault_token_create - creation or orphan tokens uses ``hvac``'s new v1 method for creating orphans, or falls back to the v0 method if needed (https://github.com/ansible-collections/community.hashi_vault/issues/301). + +v3.2.0 +====== + +Release Summary +--------------- + +This release brings support for the ``azure`` auth method, adds ``412`` to the default list of HTTP status codes to be retried, and fixes a bug that causes failures in token auth with ``requests>=2.28.0``. + +Minor Changes +------------- + +- community.hashi_vault collection - add support for ``azure`` auth method, for Azure service principal, managed identity, or plain JWT access token (https://github.com/ansible-collections/community.hashi_vault/issues/293). +- community.hashi_vault retries - `HTTP status code 412 <https://www.vaultproject.io/api-docs#412>`__ has been added to the default list of codes to be retried, for the new `Server Side Consistent Token feature <https://www.vaultproject.io/docs/faq/ssct#q-is-there-anything-else-i-need-to-consider-to-achieve-consistency-besides-upgrading-to-vault-1-10>`__ in Vault Enterprise (https://github.com/ansible-collections/community.hashi_vault/issues/290). + +Bugfixes +-------- + +- community.hashi_vault plugins - tokens will be cast to a string type before being sent to ``hvac`` to prevent errors in ``requests`` when values are ``AnsibleUnsafe`` (https://github.com/ansible-collections/community.hashi_vault/issues/289). +- modules - fix a "variable used before assignment" that cannot be reached but causes sanity test failures (https://github.com/ansible-collections/community.hashi_vault/issues/296). + +v3.1.0 +====== + +Release Summary +--------------- + +A default value that was set incorrectly will be corrected in ``4.0.0``. +A deprecation warning will be shown until then if the value is not specified explicitly. +This version also includes some fixes and improvements to the licensing in the collection, which does not affect any functionality. + +Deprecated Features +------------------- + +- vault_kv2_get lookup - the ``engine_mount_point option`` in the ``vault_kv2_get`` lookup only will change its default from ``kv`` to ``secret`` in community.hashi_vault version 4.0.0 (https://github.com/ansible-collections/community.hashi_vault/issues/279). 
+ +Bugfixes +-------- + +- Add SPDX license headers to individual files (https://github.com/ansible-collections/community.hashi_vault/pull/282). +- Add missing ``BSD-2-Clause.txt`` file for BSD licensed content (https://github.com/ansible-collections/community.hashi_vault/issues/275). +- Use the correct GPL license for plugin_utils (https://github.com/ansible-collections/community.hashi_vault/issues/276). + +v3.0.0 +====== + +Release Summary +--------------- + +Version 3.0.0 of ``community.hashi_vault`` drops support for Ansible 2.9 and ansible-base 2.10. +Several deprecated features have been removed. See the changelog for the full list. + +Deprecated Features +------------------- + +- token_validate options - the shared auth option ``token_validate`` will change its default from ``true`` to ``false`` in community.hashi_vault version 4.0.0. The ``vault_login`` lookup and module will keep the default value of ``true`` (https://github.com/ansible-collections/community.hashi_vault/issues/248). + +Removed Features (previously deprecated) +---------------------------------------- + +- aws_iam auth - the deprecated alias ``aws_iam_login`` for the ``aws_iam`` value of the ``auth_method`` option has been removed (https://github.com/ansible-collections/community.hashi_vault/issues/194). +- community.hashi_vault collection - support for Ansible 2.9 and ansible-base 2.10 has been removed (https://github.com/ansible-collections/community.hashi_vault/issues/189). +- hashi_vault lookup - the deprecated ``[lookup_hashi_vault]`` INI config section has been removed in favor of the collection-wide ``[hashi_vault_collection]`` section (https://github.com/ansible-collections/community.hashi_vault/issues/179). + +v2.5.0 +====== + +Release Summary +--------------- + +This release finally contains dedicated KV plugins and modules, and an exciting new lookup to help use plugin values in module calls. +With that, we also have a guide in the collection docsite for migrating away from the ``hashi_vault`` lookup toward dedicated content. +We are also announcing that the ``token_validate`` option will change its default value in version 4.0.0. +This is the last planned release before 3.0.0. See the porting guide for breaking changes and removed features in the next version. + +Minor Changes +------------- + +- vault_login module & lookup - no friendly error message was given when ``hvac`` was missing (https://github.com/ansible-collections/community.hashi_vault/issues/257). +- vault_pki_certificate - add ``vault_pki_certificate`` to the ``community.hashi_vault.vault`` action group (https://github.com/ansible-collections/community.hashi_vault/issues/251). +- vault_read module & lookup - no friendly error message was given when ``hvac`` was missing (https://github.com/ansible-collections/community.hashi_vault/issues/257). +- vault_token_create - add ``vault_token_create`` to the ``community.hashi_vault.vault`` action group (https://github.com/ansible-collections/community.hashi_vault/issues/251). +- vault_token_create module & lookup - no friendly error message was given when ``hvac`` was missing (https://github.com/ansible-collections/community.hashi_vault/issues/257). +- vault_write - add ``vault_write`` to the ``community.hashi_vault.vault`` action group (https://github.com/ansible-collections/community.hashi_vault/issues/251). 
+ +Deprecated Features +------------------- + +- token_validate options - the shared auth option ``token_validate`` will change its default from ``True`` to ``False`` in community.hashi_vault version 4.0.0. The ``vault_login`` lookup and module will keep the default value of ``True`` (https://github.com/ansible-collections/community.hashi_vault/issues/248). + +New Plugins +----------- + +Lookup +~~~~~~ + +- vault_ansible_settings - Returns plugin settings (options) +- vault_kv1_get - Get a secret from HashiCorp Vault's KV version 1 secret store +- vault_kv2_get - Get a secret from HashiCorp Vault's KV version 2 secret store + +New Modules +----------- + +- vault_kv1_get - Get a secret from HashiCorp Vault's KV version 1 secret store +- vault_kv2_get - Get a secret from HashiCorp Vault's KV version 2 secret store + +v2.4.0 +====== + +Release Summary +--------------- + +Our first content for writing to Vault is now live. + +New Plugins +----------- + +Lookup +~~~~~~ + +- vault_write - Perform a write operation against HashiCorp Vault + +New Modules +----------- + +- vault_write - Perform a write operation against HashiCorp Vault + +v2.3.0 +====== + +Release Summary +--------------- + +This release contains new plugins and modules for creating tokens and for generating certificates with Vault's PKI secrets engine. + +New Plugins +----------- + +Lookup +~~~~~~ + +- vault_token_create - Create a HashiCorp Vault token + +New Modules +----------- + +- vault_pki_generate_certificate - Generates a new set of credentials (private key and certificate) using HashiCorp Vault PKI +- vault_token_create - Create a HashiCorp Vault token + +v2.2.0 +====== + +Release Summary +--------------- + +This release contains a new lookup/module combo for logging in to Vault, and includes our first filter plugin. + +Minor Changes +------------- + +- The Filter guide has been added to the collection's docsite. + +New Plugins +----------- + +Filter +~~~~~~ + +- vault_login_token - Extracts the client token from a Vault login response + +Lookup +~~~~~~ + +- vault_login - Perform a login operation against HashiCorp Vault + +New Modules +----------- + +- vault_login - Perform a login operation against HashiCorp Vault + +v2.1.0 +====== + +Release Summary +--------------- + +The most important change in this release is renaming the ``aws_iam_login`` auth method to ``aws_iam`` and deprecating the old name. This release also announces the deprecation of Ansible 2.9 and ansible-base 2.10 support in 3.0.0. + +Deprecated Features +------------------- + +- Support for Ansible 2.9 and ansible-base 2.10 is deprecated, and will be removed in the next major release (community.hashi_vault 3.0.0) next spring (https://github.com/ansible-community/community-topics/issues/50, https://github.com/ansible-collections/community.hashi_vault/issues/189). +- aws_iam_login auth method - the ``aws_iam_login`` method has been renamed to ``aws_iam``. The old name will be removed in collection version ``3.0.0``. Until then both names will work, and a warning will be displayed when using the old name (https://github.com/ansible-collections/community.hashi_vault/pull/193). + +Removed Features (previously deprecated) +---------------------------------------- + +- the "legacy" integration test setup has been removed; this does not affect end users and is only relevant to contributors (https://github.com/ansible-collections/community.hashi_vault/pull/191). 
+ +v2.0.0 +====== + +Release Summary +--------------- + +Version 2.0.0 of the collection drops support for Python 2 & Python 3.5, making Python 3.6 the minimum supported version. +Some deprecated features and settings have been removed as well. + +Breaking Changes / Porting Guide +-------------------------------- + +- connection options - there is no longer a default value for the ``url`` option (the Vault address), so a value must be supplied (https://github.com/ansible-collections/community.hashi_vault/issues/83). + +Removed Features (previously deprecated) +---------------------------------------- + +- drop support for Python 2 and Python 3.5 (https://github.com/ansible-collections/community.hashi_vault/issues/81). +- support for the following deprecated environment variables has been removed: ``VAULT_AUTH_METHOD``, ``VAULT_TOKEN_PATH``, ``VAULT_TOKEN_FILE``, ``VAULT_ROLE_ID``, ``VAULT_SECRET_ID`` (https://github.com/ansible-collections/community.hashi_vault/pull/173). + +v1.5.0 +====== + +Release Summary +--------------- + +This release includes a new action group for use with ``module_defaults``, and additional ways of specifying the ``mount_point`` option for plugins. +This will be the last ``1.x`` release. + +Minor Changes +------------- + +- add the ``community.hashi_vault.vault`` action group (https://github.com/ansible-collections/community.hashi_vault/pull/172). +- auth methods - Add support for configuring the ``mount_point`` auth method option in plugins via the ``ANSIBLE_HASHI_VAULT_MOUNT_POINT`` environment variable, ``ansible_hashi_vault_mount_point`` ansible variable, or ``mount_point`` INI section (https://github.com/ansible-collections/community.hashi_vault/pull/171). + +v1.4.1 +====== + +Release Summary +--------------- + +This release contains a bugfix for ``aws_iam_login`` authentication. + +Bugfixes +-------- + +- aws_iam_login auth method - fix incorrect use of ``boto3``/``botocore`` that prevented proper loading of AWS IAM role credentials (https://github.com/ansible-collections/community.hashi_vault/issues/167). + +v1.4.0 +====== + +Release Summary +--------------- + +This release includes bugfixes, a new auth method (``cert``), and the first new content since the collection's formation, the ``vault_read`` module and lookup plugin. +We're also announcing the deprecation of the ``[lookup_hashi_vault]`` INI section (which will continue working up until its removal only for the ``hashi_vault`` lookup), to be replaced by the ``[hashi_vault_collection]`` section that will apply to all plugins in the collection. + +Minor Changes +------------- + +- community.hashi_vault collection - add cert auth method (https://github.com/ansible-collections/community.hashi_vault/pull/159). + +Deprecated Features +------------------- + +- lookup hashi_vault - the ``[lookup_hashi_vault]`` section in the ``ansible.cfg`` file is deprecated and will be removed in collection version ``3.0.0``. Instead, the section ``[hashi_vault_collection]`` can be used, which will apply to all plugins in the collection going forward (https://github.com/ansible-collections/community.hashi_vault/pull/144). + +Bugfixes +-------- + +- aws_iam_login auth - the ``aws_security_token`` option was not used, causing assumed role credentials to fail (https://github.com/ansible-collections/community.hashi_vault/issues/160). 
+- hashi_vault collection - a fallback import supporting the ``retries`` option for ``urllib3`` via ``requests.packages.urllib3`` was not correctly formed (https://github.com/ansible-collections/community.hashi_vault/issues/116). +- hashi_vault collection - unhandled exception with ``token`` auth when ``token_file`` exists but is a directory (https://github.com/ansible-collections/community.hashi_vault/issues/152). + +New Plugins +----------- + +Lookup +~~~~~~ + +- vault_read - Perform a read operation against HashiCorp Vault + +New Modules +----------- + +- vault_read - Perform a read operation against HashiCorp Vault + +v1.3.2 +====== + +Release Summary +--------------- + +This release adds requirements detection support for Ansible Execution Environments. It also updates and adds new guides in our `collection docsite <https://docs.ansible.com/ansible/devel/collections/community/hashi_vault>`_. +This release also announces the dropping of Python 3.5 support in version ``2.0.0`` of the collection, alongside the previous announcement that Python 2.x support will be dropped in ``2.0.0``. + +Minor Changes +------------- + +- hashi_vault collection - add ``execution-environment.yml`` and a python requirements file to better support ``ansible-builder`` (https://github.com/ansible-collections/community.hashi_vault/pull/105). + +Deprecated Features +------------------- + +- hashi_vault collection - support for Python 3.5 will be dropped in version ``2.0.0`` of ``community.hashi_vault`` (https://github.com/ansible-collections/community.hashi_vault/issues/81). + +v1.3.1 +====== + +Release Summary +--------------- + +This release fixes an error in the documentation. No functionality is changed, so it's not necessary to upgrade from ``1.3.0``. + +v1.3.0 +====== + +Release Summary +--------------- + +This release adds two connection-based options for controlling timeouts and retrying failed Vault requests. + +Minor Changes +------------- + +- hashi_vault lookup - add ``retries`` and ``retry_action`` to enable built-in retry on failure (https://github.com/ansible-collections/community.hashi_vault/pull/71). +- hashi_vault lookup - add ``timeout`` option to control connection timeouts (https://github.com/ansible-collections/community.hashi_vault/pull/100). + +v1.2.0 +====== + +Release Summary +--------------- + +This release brings several new ways of accessing options, such as Ansible vars, new environment variables, and INI config entries. +A special ``none`` auth type is also added, for working with certain Vault Agent configurations. +This release also announces the deprecation of Python 2 support in version ``2.0.0`` of the collection. + +Minor Changes +------------- + +- hashi_vault lookup - add ``ANSIBLE_HASHI_VAULT_CA_CERT`` env var (with ``VAULT_CACERT`` low-precedence fallback) for ``ca_cert`` option (https://github.com/ansible-collections/community.hashi_vault/pull/97). +- hashi_vault lookup - add ``ANSIBLE_HASHI_VAULT_PASSWORD`` env var and ``ansible_hashi_vault_password`` ansible var for ``password`` option (https://github.com/ansible-collections/community.hashi_vault/pull/96). +- hashi_vault lookup - add ``ANSIBLE_HASHI_VAULT_USERNAME`` env var and ``ansible_hashi_vault_username`` ansible var for ``username`` option (https://github.com/ansible-collections/community.hashi_vault/pull/96). +- hashi_vault lookup - add ``ansible_hashi_vault_auth_method`` Ansible vars entry to the ``auth_method`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86). 
+- hashi_vault lookup - add ``ansible_hashi_vault_ca_cert`` ansible var for ``ca_cert`` option (https://github.com/ansible-collections/community.hashi_vault/pull/97). +- hashi_vault lookup - add ``ansible_hashi_vault_namespace`` Ansible vars entry to the ``namespace`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86). +- hashi_vault lookup - add ``ansible_hashi_vault_proxies`` Ansible vars entry to the ``proxies`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86). +- hashi_vault lookup - add ``ansible_hashi_vault_role_id`` Ansible vars entry to the ``role_id`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86). +- hashi_vault lookup - add ``ansible_hashi_vault_secret_id`` Ansible vars entry to the ``secret_id`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86). +- hashi_vault lookup - add ``ansible_hashi_vault_token_file`` Ansible vars entry to the ``token_file`` option (https://github.com/ansible-collections/community.hashi_vault/pull/95). +- hashi_vault lookup - add ``ansible_hashi_vault_token_path`` Ansible vars entry to the ``token_path`` option (https://github.com/ansible-collections/community.hashi_vault/pull/95). +- hashi_vault lookup - add ``ansible_hashi_vault_token_validate`` Ansible vars entry to the ``token_validate`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86). +- hashi_vault lookup - add ``ansible_hashi_vault_token`` Ansible vars entry to the ``token`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86). +- hashi_vault lookup - add ``ansible_hashi_vault_url`` and ``ansible_hashi_vault_addr`` Ansible vars entries to the ``url`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86). +- hashi_vault lookup - add ``ansible_hashi_vault_validate_certs`` Ansible vars entry to the ``validate_certs`` option (https://github.com/ansible-collections/community.hashi_vault/pull/95). +- hashi_vault lookup - add ``ca_cert`` INI config file key for ``ca_cert`` option (https://github.com/ansible-collections/community.hashi_vault/pull/97). +- hashi_vault lookup - add ``none`` auth type which allows for passive auth via a Vault agent (https://github.com/ansible-collections/community.hashi_vault/pull/80). + +Deprecated Features +------------------- + +- hashi_vault collection - support for Python 2 will be dropped in version ``2.0.0`` of ``community.hashi_vault`` (https://github.com/ansible-collections/community.hashi_vault/issues/81). + +v1.1.3 +====== + +Release Summary +--------------- + +This release fixes a bug with ``userpass`` authentication and ``hvac`` versions 0.9.6 and higher. + +Bugfixes +-------- + +- hashi_vault - userpass authentication did not work with hvac 0.9.6 or higher (https://github.com/ansible-collections/community.hashi_vault/pull/68). + +v1.1.2 +====== + +Release Summary +--------------- + +This release contains the same functionality as 1.1.1. The only change is to mark some code as internal to the collection. If you are already using 1.1.1 as an end user, you do not need to update. + +v1.1.1 +====== + +Release Summary +--------------- + +This bugfix release restores the use of the ``VAULT_ADDR`` environment variable for setting the ``url`` option. +See the PR linked from the changelog entry for details and workarounds if you cannot upgrade. 
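+
+The ``ansible_hashi_vault_*`` entries listed under v1.2.0 above let connection and auth options be supplied as ordinary Ansible variables instead of lookup arguments. A minimal sketch, with an illustrative address and a hypothetical ``my_vault_token`` variable:
+
+.. code-block:: yaml
+
+    - hosts: localhost
+      vars:
+        ansible_hashi_vault_url: https://vault.example.com:8200   # illustrative address
+        ansible_hashi_vault_auth_method: token
+        ansible_hashi_vault_token: "{{ my_vault_token }}"          # hypothetical variable holding a token
+      tasks:
+        - name: Connection and auth settings come from the ansible_hashi_vault_* vars above
+          ansible.builtin.debug:
+            msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=secret/hello:value') }}"
+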
+ +Bugfixes +-------- + +- hashi_vault - restore use of ``VAULT_ADDR`` environment variable as a low preference env var (https://github.com/ansible-collections/community.hashi_vault/pull/61). + +v1.1.0 +====== + +Release Summary +--------------- + +This release contains a new ``proxies`` option for the ``hashi_vault`` lookup. + +Minor Changes +------------- + +- hashi_vault - add ``proxies`` option (https://github.com/ansible-collections/community.hashi_vault/pull/50). + +v1.0.0 +====== + +Release Summary +--------------- + +Our first major release contains a single breaking change that will affect only a small subset of users. No functionality is removed. See the details in the changelog to determine whether you're affected and, if so, how to remediate. + +Breaking Changes / Porting Guide +-------------------------------- + +- hashi_vault - the ``VAULT_ADDR`` environment variable is now checked last for the ``url`` parameter. For details on which use cases are impacted, see https://github.com/ansible-collections/community.hashi_vault/issues/8. + +v0.2.0 +====== + +Release Summary +--------------- + +Several backwards-compatible bugfixes and enhancements in this release. +Some environment variables are deprecated and have standardized replacements. + +Minor Changes +------------- + +- Add optional ``aws_iam_server_id`` parameter as the value for ``X-Vault-AWS-IAM-Server-ID`` header (https://github.com/ansible-collections/community.hashi_vault/pull/27). +- hashi_vault - ``ANSIBLE_HASHI_VAULT_ADDR`` environment variable added for option ``url`` (https://github.com/ansible-collections/community.hashi_vault/issues/8). +- hashi_vault - ``ANSIBLE_HASHI_VAULT_AUTH_METHOD`` environment variable added for option ``auth_method`` (https://github.com/ansible-collections/community.hashi_vault/issues/17). +- hashi_vault - ``ANSIBLE_HASHI_VAULT_ROLE_ID`` environment variable added for option ``role_id`` (https://github.com/ansible-collections/community.hashi_vault/issues/20). +- hashi_vault - ``ANSIBLE_HASHI_VAULT_SECRET_ID`` environment variable added for option ``secret_id`` (https://github.com/ansible-collections/community.hashi_vault/issues/20). +- hashi_vault - ``ANSIBLE_HASHI_VAULT_TOKEN_FILE`` environment variable added for option ``token_file`` (https://github.com/ansible-collections/community.hashi_vault/issues/15). +- hashi_vault - ``ANSIBLE_HASHI_VAULT_TOKEN_PATH`` environment variable added for option ``token_path`` (https://github.com/ansible-collections/community.hashi_vault/issues/15). +- hashi_vault - ``namespace`` parameter can be specified in INI or via env vars ``ANSIBLE_HASHI_VAULT_NAMESPACE`` (new) and ``VAULT_NAMESPACE`` (lower preference) (https://github.com/ansible-collections/community.hashi_vault/issues/14). +- hashi_vault - ``token`` parameter can now be specified via ``ANSIBLE_HASHI_VAULT_TOKEN`` as well as via ``VAULT_TOKEN`` (the latter with lower preference) (https://github.com/ansible-collections/community.hashi_vault/issues/16). +- hashi_vault - add ``token_validate`` option to control token validation (https://github.com/ansible-collections/community.hashi_vault/pull/24). +- hashi_vault - uses new AppRole method in hvac 0.10.6 with fallback to deprecated method with warning (https://github.com/ansible-collections/community.hashi_vault/pull/33). 
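+
+The v0.2.0 entries above add ``token_validate`` alongside the new ``ANSIBLE_HASHI_VAULT_*`` environment variables. A minimal sketch of disabling token validation for a token that lacks the ``lookup-self`` ability (the address and secret path are illustrative, and ``limited_token`` is a hypothetical variable):
+
+.. code-block:: yaml
+
+    - name: Read a secret without validating the token first (illustrative values)
+      ansible.builtin.debug:
+        msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=secret/hello:value', url='https://vault.example.com:8200', token=limited_token, token_validate=False) }}"
+
+The ``url`` and ``token`` arguments could equally come from the ``ANSIBLE_HASHI_VAULT_ADDR`` and ``ANSIBLE_HASHI_VAULT_TOKEN`` environment variables listed above.
+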
+ +Deprecated Features +------------------- + +- hashi_vault - ``VAULT_ADDR`` environment variable for option ``url`` will have its precedence lowered in 1.0.0; use ``ANSIBLE_HASHI_VAULT_ADDR`` to intentionally override a config value (https://github.com/ansible-collections/community.hashi_vault/issues/8). +- hashi_vault - ``VAULT_AUTH_METHOD`` environment variable for option ``auth_method`` will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_AUTH_METHOD`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/17). +- hashi_vault - ``VAULT_ROLE_ID`` environment variable for option ``role_id`` will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_ROLE_ID`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/20). +- hashi_vault - ``VAULT_SECRET_ID`` environment variable for option ``secret_id`` will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_SECRET_ID`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/20). +- hashi_vault - ``VAULT_TOKEN_FILE`` environment variable for option ``token_file`` will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_TOKEN_FILE`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/15). +- hashi_vault - ``VAULT_TOKEN_PATH`` environment variable for option ``token_path`` will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_TOKEN_PATH`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/15). + +Bugfixes +-------- + +- hashi_vault - ``mount_point`` parameter did not work with ``aws_iam_login`` auth method (https://github.com/ansible-collections/community.hashi_vault/issues/7) +- hashi_vault - fallback logic for handling deprecated style of auth in hvac was not implemented correctly (https://github.com/ansible-collections/community.hashi_vault/pull/33). +- hashi_vault - parameter ``mount_point`` does not work with JWT auth (https://github.com/ansible-collections/community.hashi_vault/issues/29). +- hashi_vault - tokens without ``lookup-self`` ability can't be used because of validation (https://github.com/ansible-collections/community.hashi_vault/issues/18). + +v0.1.0 +====== + +Release Summary +--------------- + +Our first release matches the ``hashi_vault`` lookup functionality provided by ``community.general`` version ``1.3.0``. 
+ diff --git a/ansible_collections/community/hashi_vault/FILES.json b/ansible_collections/community/hashi_vault/FILES.json new file mode 100644 index 000000000..a64ed5bf0 --- /dev/null +++ b/ansible_collections/community/hashi_vault/FILES.json @@ -0,0 +1,4079 @@ +{ + "files": [ + { + "name": ".", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": ".github", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": ".github/actions", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": ".github/actions/ansible-codecov", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": ".github/actions/ansible-codecov/action.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "36982d06f3f822789b2b622cb93008c25fdc3ae8ea2e33aa1ba84ba5c9d5fbad", + "format": 1 + }, + { + "name": ".github/actions/ansible-codecov/process.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a3cb63be04c693ce50d1b79eb5bc3a10acf3ee7b07f3e27d020ebd4e92cd12ae", + "format": 1 + }, + { + "name": ".github/actions/collection-via-git", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": ".github/actions/collection-via-git/action.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4eaeac272990f43a7c7f2cf6b31c3f2359ad01c795ba67e275086d6d40be935f", + "format": 1 + }, + { + "name": ".github/actions/docker-image-versions", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": ".github/actions/docker-image-versions/action.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "84f0cc9401479ea5de42f11f78b581b9e230a0676bbc6e79f2672f1a72f47912", + "format": 1 + }, + { + "name": ".github/actions/docker-image-versions/requirements.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d447e30c8738413442fd0c6166985764a5be5d67696f0fef4e2410c0bc6c0c1f", + "format": 1 + }, + { + "name": ".github/actions/docker-image-versions/versions.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "959b408cc5a5c6a8276906db11846cc1b5a0b73ff5761036cbfa92b410e73eee", + "format": 1 + }, + { + "name": ".github/actions/pull-ansible-test-images", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": ".github/actions/pull-ansible-test-images/action.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "04d1894d761baeecaa41a1436471185d4b3003790b23ea7db8100da734360f5f", + "format": 1 + }, + { + "name": ".github/workflows", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": ".github/workflows/ansible-builder.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "09b7434235b6c0eafa5ec2649fcb3f7121917e9f18723ca0af3c239d1bc644c7", + "format": 1 + }, + { + "name": ".github/workflows/ansible-test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "442c8de42766184549bbc6e8f1687a80b488e71f7072d3aee57d9851c296b4d3", + "format": 1 + }, + { + "name": ".github/workflows/docs-push.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6b0caa4f468944e75ecec40c2e70cb840981098d36f036f438378fd1272f45a5", + "format": 1 + }, + { + "name": ".github/workflows/docs.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": 
"5ac8ea7d0b6cd4dc216696c731df6457c0ab1630988c7ff3ee52ed3d7ff04aa6", + "format": 1 + }, + { + "name": ".github/workflows/github-release.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "cebad181da6db6c618d2e698942a868e8e4ef90ad977465ebf2396ae2947d4b7", + "format": 1 + }, + { + "name": "LICENSES", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "LICENSES/GPL-3.0-or-later.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986", + "format": 1 + }, + { + "name": "LICENSES/BSD-2-Clause.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f6036f79d054f42e11f2dd52458b4d2282e901d197955e598bf1a23600280cf0", + "format": 1 + }, + { + "name": "changelogs", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "changelogs/fragments", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "changelogs/fragments/.keep", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "format": 1 + }, + { + "name": "changelogs/changelog.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a854ea29e391bde2c6bcd8c45e7081d746daff404d3f4221af5f43c7e320a142", + "format": 1 + }, + { + "name": "changelogs/config.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "630354cc2146c9705841870850c9a5b3b8f61775452e45f056f3a5f84c6e5f20", + "format": 1 + }, + { + "name": "docs", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "docs/docsite", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "docs/docsite/rst", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "docs/docsite/rst/CHANGELOG.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "79bab511c63a4f5ffdffbf0deb76f4b6aed0db672e46dc34413030f308cbba4a", + "format": 1 + }, + { + "name": "docs/docsite/rst/about_hashi_vault_lookup.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "359935436b453b831511125a4295577e760a2653e406a4a3c35656edc63a2830", + "format": 1 + }, + { + "name": "docs/docsite/rst/contributor_guide.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "74452cc593c894e17a9c02a109257b1d26018c31788dd6218a268b7bd0115684", + "format": 1 + }, + { + "name": "docs/docsite/rst/filter_guide.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b1899cfe5b8d6dc464f2206440812c46da19de24e046804c90de5c427e101f7f", + "format": 1 + }, + { + "name": "docs/docsite/rst/localenv_developer_guide.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "cbcb1a2dea36ddc33f0e2526db52cc5bd45147359a5b019c027197ba48184107", + "format": 1 + }, + { + "name": "docs/docsite/rst/lookup_guide.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3893168a6edb31267f0d340c91cecec21a764ee1bf36d865a13c6cfa5ee600e3", + "format": 1 + }, + { + "name": "docs/docsite/rst/migration_hashi_vault_lookup.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f16a89dc5d9df363f269a17b344f7e36b1fe15adb7478e7626f35c8011398087", + "format": 1 + }, + { + "name": "docs/docsite/rst/user_guide.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": 
"0cd48bf54449f41af3ccf83fec27be2554b3774b4bc76d7a7755a856b24b3244", + "format": 1 + }, + { + "name": "docs/docsite/extra-docs.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "470595d0ada634a31af0248b52ba5b501b345c04d6532053756d52376fc0c5c8", + "format": 1 + }, + { + "name": "docs/docsite/links.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9eabba48e208d2aae500a3ce03fa5f30608f7500791f9335690c0b0000e63d2b", + "format": 1 + }, + { + "name": "docs/preview", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "docs/preview/rst", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "docs/preview/rst/index.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ec5cac0ece5073961275cc10241108244de22b4ae35a2618ea70bf2c65e2a6d0", + "format": 1 + }, + { + "name": "docs/preview/.gitignore", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "650401faf4ef7de0d29709cfde6f6886ba550f8bdc864ce7675ffd6d320208c7", + "format": 1 + }, + { + "name": "docs/preview/antsibull-docs.cfg", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "de88d294648c438dc915aab5e5f1d720cb15216fbf6a47b2e77fbd66aaa436b7", + "format": 1 + }, + { + "name": "docs/preview/build.sh", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3c40a54d95f2c5784967948998a9b906051f6585f1583b142dbd5efe9fa292af", + "format": 1 + }, + { + "name": "docs/preview/conf.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5595b0e82f90f5d0c1500d0136645fe02f665a2734fcc76830a91e1acce7599d", + "format": 1 + }, + { + "name": "docs/preview/requirements.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1b93af5a0b69f8c0332198fe7eeb1c2409b8554c3b226c3d1cb5f46a78d82e79", + "format": 1 + }, + { + "name": "meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "meta/ee-requirements.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "21886d56af804293670e040b8dd722757ec0247cee26caec659e667991efc556", + "format": 1 + }, + { + "name": "meta/execution-environment.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "08c58558baed01cd87b27714788c2f781345ef12a683731bddc3a9c9fec0179c", + "format": 1 + }, + { + "name": "meta/runtime.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "94f270496ec3a6415ee18ae1e586e8038b1b3f43d4b03eef6d5ba764259a0724", + "format": 1 + }, + { + "name": "plugins", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/doc_fragments", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/doc_fragments/attributes.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "40d52f4bd9de245d207ceecba561721767938d8d4fa89d3beced47c5527affc4", + "format": 1 + }, + { + "name": "plugins/doc_fragments/auth.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d983c9c2ffec0153202c74cb4d003562e2d3272e75920097bcd0dee4bf08cc12", + "format": 1 + }, + { + "name": "plugins/doc_fragments/connection.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "02a646c306faab6eb74d38f99ff993dc67b3a636cc066dc17faab52514a3d4db", + "format": 1 + }, + { + "name": "plugins/doc_fragments/engine_mount.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": 
"13bdfb1d9eeea4aede13b85daefc601ffdd70ebc6372a7ae2e532491f4530a7c", + "format": 1 + }, + { + "name": "plugins/doc_fragments/token_create.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f4509ba9c5f3c6593b143f78a23c68b68e0ded436472297b0b7b11b7b5426fce", + "format": 1 + }, + { + "name": "plugins/doc_fragments/wrapping.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b9a1d5d5eaebb70d2733b3b2c673fdc6619fed7caca2297130356ab364e31837", + "format": 1 + }, + { + "name": "plugins/filter", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/filter/vault_login_token.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4ea69cb4bd06394f829d013c97cd6693d5890fcc8b8aced49d2ea7914cff877a", + "format": 1 + }, + { + "name": "plugins/filter/vault_login_token.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "64031deeb8780d31e4c62ebee5bd00986ba0f5df1781b7b9b4f2a023e64d019c", + "format": 1 + }, + { + "name": "plugins/lookup", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/lookup/hashi_vault.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1f9913f0e357c356b80860ddc23e77d435fe7bbd33caa5b907fddb42aff365b2", + "format": 1 + }, + { + "name": "plugins/lookup/vault_ansible_settings.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6bc2336fab1349619595e0bec46c6f1ed5de0c7d61d9e29044c01ab3a4dcaf53", + "format": 1 + }, + { + "name": "plugins/lookup/vault_kv1_get.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7ac8a8fc76f3315c704378161dfcb194497899d713712a36ae8bf5a1c5f8465f", + "format": 1 + }, + { + "name": "plugins/lookup/vault_kv2_get.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "eb837cc9607bb1f67e1f428c34a498b899ac8a7128bbc26f50fe1249f87e888e", + "format": 1 + }, + { + "name": "plugins/lookup/vault_list.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "06d00969ce4324c2664a0b29eedd220afb52599b51bed4819716c5af021003de", + "format": 1 + }, + { + "name": "plugins/lookup/vault_login.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f84038c53b9fc4dc2074fe8f87098ad30740824eff3e496e9f10aaa6f812699a", + "format": 1 + }, + { + "name": "plugins/lookup/vault_read.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "18122bb835e8b67e9c7e73e4893bd381211560f15e855489c2b9a64758dade46", + "format": 1 + }, + { + "name": "plugins/lookup/vault_token_create.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9c208d252553cf150234f240d5898c40521627fe9f8db60330cae1fed7b7cd65", + "format": 1 + }, + { + "name": "plugins/lookup/vault_write.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d8fd10b5075f0bdc34e8b1ea0f7707acc1e198bd2d7e5acf22033ad39646564e", + "format": 1 + }, + { + "name": "plugins/module_utils", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/module_utils/_auth_method_approle.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b0443e5f8143bb36d0f2f4daabd59aa83c8bc7a99e85d9d59868d7e37b0daf4e", + "format": 1 + }, + { + "name": "plugins/module_utils/_auth_method_aws_iam.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "85b6f75854df78a1a1412aab052128a31fb482ea3fde4b13f11dbc9152148622", + "format": 1 + }, + { + "name": 
"plugins/module_utils/_auth_method_azure.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "52865e47cfbf2cdfbb3d2bd4badc40b47a1d69129a6e52df1c4f152f5dac39a4", + "format": 1 + }, + { + "name": "plugins/module_utils/_auth_method_cert.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ddd131064d5827eba9ac175855958ce0ad1eafac41f51fbb16fd99fbef67ed28", + "format": 1 + }, + { + "name": "plugins/module_utils/_auth_method_jwt.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "641a0e7c1e38d97c564ee4d0646bac797488b6e3f2bfb9ea5e09eeea6ffa6dd6", + "format": 1 + }, + { + "name": "plugins/module_utils/_auth_method_ldap.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "bcbc5d9ba050c07cdf55d9f638a0578ab42bb6fe6c38fa057090e4f2ba0737d4", + "format": 1 + }, + { + "name": "plugins/module_utils/_auth_method_none.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3b15b43d69152bd5461b9d75c9aa436a4b32e398f53763954257fe0c8319ee97", + "format": 1 + }, + { + "name": "plugins/module_utils/_auth_method_token.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f88bf860fbb14a5163dd30f0e03b629e4c7b8b83acfdb24f3783d8cc93a867c3", + "format": 1 + }, + { + "name": "plugins/module_utils/_auth_method_userpass.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e4013b231b2558cfd70cf15f9a40359fc3dc57c4b5860a280a4794fd50552466", + "format": 1 + }, + { + "name": "plugins/module_utils/_authenticator.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4105181057d153a8daea1a2c9ede0faf16917481c7a2a8b64811afd9789de2bd", + "format": 1 + }, + { + "name": "plugins/module_utils/_connection_options.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1c30d05a0fb24f6fa0d22f8837620101366b8fa761e89dcd7828d429537031e3", + "format": 1 + }, + { + "name": "plugins/module_utils/_hashi_vault_common.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5ab33ef2f123fce48c13ea200a22ee06a45c565cdc147e4403f879ef585ff725", + "format": 1 + }, + { + "name": "plugins/module_utils/_hashi_vault_module.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "bc760b040087f59b503920f6d9220de105fa7a40c1bc2d9fb3e3fe94bbbc5283", + "format": 1 + }, + { + "name": "plugins/modules", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/modules/vault_kv1_get.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9cbf80740851cb0a83bd78262751fce2c8b0b68e30907f11df920c9103921c2d", + "format": 1 + }, + { + "name": "plugins/modules/vault_kv2_delete.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a513ed2efa1457798ae8858dea36bf91dd7b66c374ac00f4916925bc9f6a0186", + "format": 1 + }, + { + "name": "plugins/modules/vault_kv2_get.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c8a301ac8e11268ac34d084f0bfdd037b7418cbb53466783bbf3cc48b6688356", + "format": 1 + }, + { + "name": "plugins/modules/vault_kv2_write.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ef932a042e2304ae0e78ad2b802e38a9f6d898d83dbc53e185937ed97f70201a", + "format": 1 + }, + { + "name": "plugins/modules/vault_list.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "8e9ec6a358185e5d5ef4291f3bc0acd9f30b30a8e9c7382e91803bd10fa8f4c0", + "format": 1 + }, + { + "name": "plugins/modules/vault_login.py", + "ftype": "file", + "chksum_type": "sha256", + 
"chksum_sha256": "c80fad9f98f2e8667489ee0e1d139b4e58e108a4ff8c9b4dca8aef9a13547641", + "format": 1 + }, + { + "name": "plugins/modules/vault_pki_generate_certificate.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "838fc6c1b1adc039a217866d2e9134dd85e999b7393b48c927e25fcccb8d9fdc", + "format": 1 + }, + { + "name": "plugins/modules/vault_read.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "bea1a0aa9d19e352c1c0bd023eb9bdf640ef86c60d5c051e5e9ead5ec30229c0", + "format": 1 + }, + { + "name": "plugins/modules/vault_token_create.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9e1f47a6dcfc7b9e3e3bbe93f66615f892a9264a326d5ad18d3fd7dfd99e73e6", + "format": 1 + }, + { + "name": "plugins/modules/vault_write.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c17d015dff130769b05357cb3496b2b32acdbc4f1b4f84d2f0b24f31bfd51ae6", + "format": 1 + }, + { + "name": "plugins/plugin_utils", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/plugin_utils/_hashi_vault_lookup_base.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "8bacb7aeaf9aefb8a7b750c70c8a01a5d3970a326711c1579deaeff4de3fbd9e", + "format": 1 + }, + { + "name": "plugins/plugin_utils/_hashi_vault_plugin.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4b62cbb67c6dd87fd62dc1919fc3da041393c52d4ffc9033822dd9e8697ac526", + "format": 1 + }, + { + "name": "tests", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_approle", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_approle/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_approle/defaults/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ca64325b607f35c00f42f44420f714c5a9a44384e17f44c60cd27ec28e2b3d85", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_approle/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_approle/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_approle/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_approle/tasks/approle_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d8958a5a7dd185e348b082512c1030a744101677af0fbe47013d88cd46acf1a8", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_approle/tasks/approle_test_controller.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "db2c8d9bffe8900e52fca26547274879ea6f0f9082aa63a0fa5afbf061951b30", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_approle/tasks/approle_test_target.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": 
"1901a0c244201493787e3a9b4191d7de38f3d78fb4119a25f71f61e6386bf8c1", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_approle/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e6afe86da906899d4cbb718e4ceee48d3fd935a77cc5503b69add5cf940bb9ef", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_approle/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d5236e28efba210fd8a3cab3820fa959fd88f9c42b1634cf7e2aeea0ece56511", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_aws_iam", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_aws_iam/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_aws_iam/defaults/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ce0db9a2d8c29fdf015245ef9a7533449a075b39605428ed6818cd92d7edd5ed", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_aws_iam/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_aws_iam/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_aws_iam/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_aws_iam/tasks/aws_iam_test_controller.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1bf5b42d9279722cb59fa8b79f787c170371674d65e705510803b4f04c1861dd", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_aws_iam/tasks/aws_iam_test_target.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "91cb21963bca9ecfa4ecd1dc21fec09685e95e9df8d702057cc390295f7422c0", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_aws_iam/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9f8e209e859c00c8caf7898bb8fb2d35ddf6866c8e0074ce2c58d39296439f02", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_aws_iam/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "34acfe25784af2b0e914e0cb19763dfd48ddffc47a438de4c84ac843e7527a1d", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_azure", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_azure/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_azure/defaults/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "62975974e27d90a00a27482a6e7eb740c513bc7ca89e8a5f8db4e738900e1b9e", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_azure/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_azure/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7bc9697018bc1afcd4d638ce183757be64930036cef35df5e5d56471953ebb99", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_azure/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": 
"tests/integration/targets/auth_azure/tasks/azure_test_controller.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "da3a5b203ff528408272661a6d42291c80b6c67517c2a6fb0d3a29ad0f61904c", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_azure/tasks/azure_test_target.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "da5435958d587f7bb1eb774b6bda26390ea05c0a1c0a4ba57c9e7fd824f2ffda", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_azure/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "03882c949782ad0a1cbe23a0be8b849d7c6b85f7b5700b756c379fb7d2800cff", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_azure/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "32b18b440d9d37a9c4c161d1788569e21df1ac9f20729190a50226b14262fee7", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/defaults/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1d7a6d8198a5c27e72eb33a5187b201d5ca6bccef325354b6dca2f0d7efe496a", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/files", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/files/auth_cert.crt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "8a5bbd02310486a599e8c27954d53cc1c6ea293af8397ff5aa95c94dfd89c6e8", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/files/auth_cert.key", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "605a21daae2f51ddfaa72b147d753871e3a0e549f4a31ff13d5a330203ff0a70", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/files/auth_cert_invalid.crt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "898f7fc030ec88565e07463bc8773baeb482c16d13f6b32953dd00426f971412", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/files/auth_cert_invalid.key", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3f2676509e353329db2d1ee793b27219823611eb3fa28dac4559d14902289249", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "873f0b76b395f99d36fa00eefbc2806632b7ec1c8271e956b74df484b8c09c2d", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/tasks/cert_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d601b91cf0a4fa17c7c9e4e01d37e1e746f95dadac74723c3f470781c68374e9", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/tasks/cert_test_controller.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "379848cef0083cb78cf45aa0147921b307b8f42881fa3000e7d0b3f87bdb5c91", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/tasks/cert_test_target.yml", + "ftype": "file", + "chksum_type": "sha256", 
+ "chksum_sha256": "94bd3b77ebd5c339f00b7a68e9221bdadc48fe00d217e50e3042c50743f6d63f", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ee0782baf3cc65d16b1159db751f29d817968fc6fe705ed541259afd3f58c618", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_cert/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "8d50a5445e2167206507a814ccb3b62fafe11354e66897641cc62d7070981a26", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/defaults/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9084e1944449c13fc1701beb35b959d7cdab30937b72aeec8db2236fd7087ee1", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/files", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/files/jwt_private.pem", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d9adb929b5968b9305a591628a4ed2cb98d2aa4209f2da2d3a44a6fb46a17ba1", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/files/jwt_public.pem", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "21d20458029194ee3697de1327a50741ca22801800d48b65d30cfab876b4ef0a", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/files/token.jwt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "560cb7531d1b1033910d828804ef4d7adbd4cef834fcc3d42f79cfef1a5e030b", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/files/token_invalid.jwt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9292b2ff2f4a057d4c12de49e429ee861bb181c1ceae52726b1125acbae5c22e", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/tasks/jwt_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3c1c231b4aeeb7910e8cdac20200558790c7c95153bea03430fdf0695109ddf1", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/tasks/jwt_test_controller.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "73fc317f8d354a5462ded38f26674750eb9f1edb7644a559e291c54faeb0e7e5", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/tasks/jwt_test_target.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1b1eba5701d38b6eff7de7547d24420d7d81ac86827c0aca044b5e41e14fade8", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_jwt/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "00aca2ae191aa626d9818d9f279a30e8ccf1172f31f77c683fe5ffaccd4deb40", + "format": 1 + }, + { + "name": 
"tests/integration/targets/auth_jwt/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "70e3eb57534fe3b13d0957a6a5d9326e40aafe85a86d85ae1827cf2fbb6285f6", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_ldap", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_ldap/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_ldap/defaults/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "79d95ef0a75528ca8f2da430cc44bb8dc24efa3f523a1fb7b8c51229f04eea92", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_ldap/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_ldap/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_ldap/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_ldap/tasks/ldap_test_controller.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "af1a1b108f1ef4cbe4ed3ab5dc0f375607cb1a55c73d00a7f707de0d8b1a64d3", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_ldap/tasks/ldap_test_target.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "270f5d3f7ff671ea2f20a55dfd424b431aa133bff57dff5e98c556ee379bda63", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_ldap/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5506a054b0000d291b243fdd6e3e2445377c21c3cc678cdcad1885e9921aa2f6", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_ldap/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "746bd98a942be0c7219c4bfd80619cd9af55fdafbf8cff3913cec992c52ad4f0", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_none", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_none/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_none/defaults/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "47c369e74981eb8578236e77ec9690702f22cc2cb2a02d1bfc11e83736408e37", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_none/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_none/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4711f4181ed51d1fd3c0912702e2e28b8429485a3ded3bbe8088b873a6ff3888", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_none/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_none/tasks/controller.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "73901ad3685b2b565d800ddc3f24a5edaf1dc1aefc50b8d03332017e78b0649b", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_none/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5deab3d68e0ed8c2f5e3a0ce7153b8b397127794e450bef88739b9dcab236cba", 
+ "format": 1 + }, + { + "name": "tests/integration/targets/auth_none/tasks/target.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0e894434c8834f52723f77b13a86a33ba5be9fc96bbfa9bc7c6241c464afe87d", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_none/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7c84433cbf8495cbd547fcfaad0abf5c898ef3929b352ad13f47c33a643923f1", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_token", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_token/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_token/defaults/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "36ca51296417405822bec7af5eadbe3a987184b11e33a0fbbb1b70c63b302315", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_token/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_token/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_token/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_token/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6b0806626ea64468831efbb27c86078a7a43e94a82dcb77aef645851f67bff96", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_token/tasks/token_test_controller.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e135196c8e082e36f18cd35ea6e736a7d6c6c0f67f7298122c896ec4ed171353", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_token/tasks/token_test_target.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c11eea8eba17a551373c684c7625da1e28d1f8e9072eb09bbfff48b57e2a35ed", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_token/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d4237076d7f70f493ddd1e80ec6c574db541ecfcae1cd98bb9abbeeee4cb5ed2", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_userpass", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_userpass/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_userpass/defaults/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f65ce2c628e893101c4475aa4b96968d535186be805515d13b43c540aa60cea0", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_userpass/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_userpass/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_userpass/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/auth_userpass/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + 
"chksum_sha256": "37c1144beac57279d2f79c9a49c2c37f8227e4f77203bf12bce1e59eeaf255a1", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_userpass/tasks/userpass_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0df93b897dc9a2fbf39741ed1b3d9fcc0acd3d415e8552e0071d7388ac6d9a8b", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_userpass/tasks/userpass_test_controller.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4c7c56c926a214164769bf476bf8674d44c99d2d73d68d5eea6a98b1d47e8d58", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_userpass/tasks/userpass_test_target.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a80bb0f3842e9ada72841ec1677e0a12703634e5d65d18a71c2f7ddcc595e30a", + "format": 1 + }, + { + "name": "tests/integration/targets/auth_userpass/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3b6a11c2f6acf054fd50c1f69b77a88ed6ae9ef08d35c96f25763511982a5cb7", + "format": 1 + }, + { + "name": "tests/integration/targets/connection_options", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/connection_options/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/connection_options/defaults/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5986da2aa3be7215a51836f93ae40bb69f9affe9eee32662ff68b507156236d2", + "format": 1 + }, + { + "name": "tests/integration/targets/connection_options/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/connection_options/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2ae20bcfb3dd6c76863de04b212aed1a3fb8c5190ac309bd3aa93dcb8475467e", + "format": 1 + }, + { + "name": "tests/integration/targets/connection_options/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/connection_options/tasks/controller.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "330ca6d4711e5e35f0b4045d6140127b7835e8d0b3e87cf66c3b98e88346b043", + "format": 1 + }, + { + "name": "tests/integration/targets/connection_options/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f45e71b008b2948ea2891d280858d39073c817b031db0071229c7181784680fd", + "format": 1 + }, + { + "name": "tests/integration/targets/connection_options/tasks/target.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "34a316f8f3fada71ca60d476144e2b29fb6c6eb285a21468e1a8b3405abab115", + "format": 1 + }, + { + "name": "tests/integration/targets/connection_options/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "50d9724c185ec6a2e45615153d8613ed23a4b6e5c96f7ebb8aeee29e0f782e70", + "format": 1 + }, + { + "name": "tests/integration/targets/filter_vault_login_token", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/filter_vault_login_token/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/filter_vault_login_token/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "935be01ea6b573b8e5fedfb58dea455e5e22011ba6acf06d9ca62e790ec7d93f", + 
"format": 1 + }, + { + "name": "tests/integration/targets/filter_vault_login_token/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "adaa3032b3bfcff24c7644a4c0af9b5db2b5c286a2eb76767f9bc545a2fa8523", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_hashi_vault", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_hashi_vault/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_hashi_vault/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_hashi_vault/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_hashi_vault/tasks/lookup_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "da29f44ffc011d762ccce49fb56d9f92c211efc2b23e862b06250b53cfa99b4e", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_hashi_vault/tasks/lookup_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "763927dee00e08efaeb2f61c3f876294a97e9d6277be53351e211974937b8b2a", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_hashi_vault/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a7166bdc9f51db1339ff24dff0a38c536e3a3b3c20aa47346f737aba536b4cd4", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_hashi_vault/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f7ff302fd49cbfcfc2b3c1c9c891c6bf1d6fc1712bb2ebe5b00a8d64b709411d", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_ansible_settings", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_ansible_settings/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_ansible_settings/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_ansible_settings/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_ansible_settings/tasks/lookup_vault_ansible_settings_configure.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "af2af79283514d8668b779d79cc31b9d0d148e185679f7415cab6ad4f944d502", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_ansible_settings/tasks/lookup_vault_ansible_settings_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9299473167b43387d736e19644bf796d75b03b04d8824093995608a3dda7d0db", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_ansible_settings/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a9e6dd9165f53ff0b3abf3c262f48558d2719854d2d8a096d19e0aaa05e70556", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_ansible_settings/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": 
"f7ff302fd49cbfcfc2b3c1c9c891c6bf1d6fc1712bb2ebe5b00a8d64b709411d", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv1_get", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv1_get/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv1_get/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv1_get/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv1_get/tasks/lookup_vault_kv1_get_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4d62c4e28a99a4ea4c7c852c2f9ad7f18ef5f0fa0aec6173d4d22549a258f3e7", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv1_get/tasks/lookup_vault_kv1_get_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "306a1c85bef35685a69d02894b81b9050e1aaa07227ccf8087ab6fab25a9a90e", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv1_get/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6b7fa4b47cc75664e1815e7f0663d874c3a9aeb94527ae0a91fc77dea886b1f5", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv1_get/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f7ff302fd49cbfcfc2b3c1c9c891c6bf1d6fc1712bb2ebe5b00a8d64b709411d", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv2_get", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv2_get/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv2_get/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv2_get/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv2_get/tasks/lookup_vault_kv2_get_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d927a3455e53a6b737a7d7900371cc8535a649629e0c4a2d011291b2a8d4d958", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv2_get/tasks/lookup_vault_kv2_get_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6535bf6438db00083344aae821a2987d7997b0ea3a2e1034247dc1dd7f8104b2", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv2_get/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f34a96cc5271230eba53dbfc2ad064e81463b3d40ddcf4071e08b6bbd37023c9", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_kv2_get/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f7ff302fd49cbfcfc2b3c1c9c891c6bf1d6fc1712bb2ebe5b00a8d64b709411d", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_list", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": 
"tests/integration/targets/lookup_vault_list/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_list/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_list/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_list/tasks/lookup_vault_list_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "da29f44ffc011d762ccce49fb56d9f92c211efc2b23e862b06250b53cfa99b4e", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_list/tasks/lookup_vault_list_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5faac85e3f97675e38057e4eeebd5f3b47070c2afadf11e8c6965175fdde5a71", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_list/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "8e2fcde3b9dc63fab20d62eecedcd9dd313207af67946bd25ff697d6d9306a78", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_list/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f7ff302fd49cbfcfc2b3c1c9c891c6bf1d6fc1712bb2ebe5b00a8d64b709411d", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_login", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_login/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_login/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_login/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_login/tasks/lookup_vault_login_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4dac8d49fd0585e2c19586a49c648155603ac0586f9e4d408d542dfbe17a84c4", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_login/tasks/lookup_vault_login_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "eecce194ddc2f6c8ea15f2216c2bb352366d64c1207459247383b9d6201146c7", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_login/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "27b028cdc3eb814bb9033714dae8a1b4f0300edf82a56b254f08b2dff96ef2f2", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_login/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f7ff302fd49cbfcfc2b3c1c9c891c6bf1d6fc1712bb2ebe5b00a8d64b709411d", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_read", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_read/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_read/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": 
"9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_read/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_read/tasks/lookup_vault_read_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "da29f44ffc011d762ccce49fb56d9f92c211efc2b23e862b06250b53cfa99b4e", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_read/tasks/lookup_vault_read_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "de93d1a8e967dad0a25a49087e1c18ba0f0703773d59d93d4eea331d425d44d9", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_read/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a3b25bce4585d38ea55dcf7e79facd090914ad503d953c24ef9320884be35afa", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_read/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f7ff302fd49cbfcfc2b3c1c9c891c6bf1d6fc1712bb2ebe5b00a8d64b709411d", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_token_create", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_token_create/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_token_create/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_token_create/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_token_create/tasks/lookup_vault_token_create_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c85486c5137bd9d5b4820e08b9c2206db7cbb87fa3af2461322048dff4bdff54", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_token_create/tasks/lookup_vault_token_create_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f29b2d3b2cd1a555edd27e23b40084406e5b3c30c40cd9c7ac70d04cf25aeca2", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_token_create/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "71fc095f979e9878ecd0f454b9e3a7e654dd6ca9b07739f295e7e0d00df63b0c", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_token_create/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f7ff302fd49cbfcfc2b3c1c9c891c6bf1d6fc1712bb2ebe5b00a8d64b709411d", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_write", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_write/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_write/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4711f4181ed51d1fd3c0912702e2e28b8429485a3ded3bbe8088b873a6ff3888", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_write/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 
+ }, + { + "name": "tests/integration/targets/lookup_vault_write/tasks/lookup_vault_write_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b2634aae7862e962b8e3b3d91e4c8139a3665f4594508759dbadb29b533098ac", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_write/tasks/lookup_vault_write_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "cf8dcda9a6613838dbf5d38d55eb7d26a8de4c405a4a254c01d2e1863c8a30a9", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_write/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a3ff1457a0a1cd947afe9dab8612c176f7d01e86dfe4426674e5b52b60e7b3da", + "format": 1 + }, + { + "name": "tests/integration/targets/lookup_vault_write/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f7ff302fd49cbfcfc2b3c1c9c891c6bf1d6fc1712bb2ebe5b00a8d64b709411d", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv1_get", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv1_get/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv1_get/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv1_get/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv1_get/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a7b0fcc3cf144f647e9ff812d244e9a9fa5505ee93150968b74fa3c80af3f1d2", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv1_get/tasks/module_vault_kv1_get_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4d62c4e28a99a4ea4c7c852c2f9ad7f18ef5f0fa0aec6173d4d22549a258f3e7", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv1_get/tasks/module_vault_kv1_get_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "743099acae8046c0084a97e034a404a3d5ce5fd6f7d518325e6631b6fbe25100", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv1_get/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fd716828fe3bf18b1f328708f5a49d15d2a07354307aa642833ac8c1f1221b8c", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_delete", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_delete/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_delete/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_delete/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_delete/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "dac8c3fdd19f0ef331541dbac0990585d56c790ac0e668c69f13aa43901f0733", + "format": 1 + }, + { + "name": 
"tests/integration/targets/module_vault_kv2_delete/tasks/module_vault_kv2_delete_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "deb41e77ec8ab94b7e52d069ba8abc7d608fface50b04d95e519cc79862476c2", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_delete/tasks/module_vault_kv2_delete_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0838e8664760bdbc3c346e3a8ff6a0933669caa7b73f5bb61917200bf1a5f209", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_delete/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fd716828fe3bf18b1f328708f5a49d15d2a07354307aa642833ac8c1f1221b8c", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_get", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_get/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_get/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_get/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_get/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2eb537abfb3a977dbfc8c3f9d8aa6a74bf6d847a40b534f33bb01ab39d553ece", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_get/tasks/module_vault_kv2_get_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9e6a6d56b14c282646e5963cea65a65810b588b6ea031a6da9c56cb55a92274f", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_get/tasks/module_vault_kv2_get_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "522b1a08b43c5197f21ffdc3d02fae29203603bd89a2613505821f63e78a81ca", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_get/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fd716828fe3bf18b1f328708f5a49d15d2a07354307aa642833ac8c1f1221b8c", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_write", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_write/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_write/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_write/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_write/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "bcffa3e2bbb00d5cfb872e44a8c434f38082ece25980f9f5893565bff6d0d47c", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_write/tasks/setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1af47e07f72f7218b072c4c596a424322c8c3859ff36a0070fa9d6c48747f8c6", + "format": 1 + }, + { + "name": 
"tests/integration/targets/module_vault_kv2_write/tasks/test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d010ebc4f545365b01b006ba74fcbba1ef9de787a464eb8b07c18cb85b127d8c", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_write/vars", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_write/vars/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "dba69bdb30b13fd447059fe9dd9b8723a67bc07cc888072e61fe4703d8bf9209", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_kv2_write/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fd716828fe3bf18b1f328708f5a49d15d2a07354307aa642833ac8c1f1221b8c", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_list", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_list/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_list/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_list/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_list/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d2bc178fdf9e58520049f8c790ab59fea46d0e0702217b09d44585d8924cfcd6", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_list/tasks/module_vault_list_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "da29f44ffc011d762ccce49fb56d9f92c211efc2b23e862b06250b53cfa99b4e", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_list/tasks/module_vault_list_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "04989cd5883e29a27e3518abf43101b3250a59ce12ab3fa211c47189ff073bc9", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_list/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fd716828fe3bf18b1f328708f5a49d15d2a07354307aa642833ac8c1f1221b8c", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_login", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_login/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_login/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_login/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_login/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b22bf1cde572747f66c24506459ce8af2fe3eb10e7e76819a36dfdb073cac838", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_login/tasks/module_vault_login_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": 
"4dac8d49fd0585e2c19586a49c648155603ac0586f9e4d408d542dfbe17a84c4", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_login/tasks/module_vault_login_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "8c301ddad3eaf8cbcce282450509b8f97373a9eaba0f303b75bdbffa31632a7e", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_login/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fd716828fe3bf18b1f328708f5a49d15d2a07354307aa642833ac8c1f1221b8c", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_pki_generate_certificate", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_pki_generate_certificate/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_pki_generate_certificate/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f46c476164b3816ef5d5177c32e476907bb4d6f10521503f49fdd4ca7750dffd", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_pki_generate_certificate/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_pki_generate_certificate/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4cecdc9ba4619ff50914708f6245117b193c1bd8d7cacdd3f0cc0e9d362f926a", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_pki_generate_certificate/tasks/module_vault_pki_generate_certificate_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fe70e81bb263e06bcae2f17a416e0924b37ad5b3909a0e40af204b537118e2af", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_pki_generate_certificate/tasks/module_vault_pki_generate_certificate_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "946878cbc4ccecb274c1bec29bfe02765fa29fcd790c2633c2b69c1ecd1850d5", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_pki_generate_certificate/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fd716828fe3bf18b1f328708f5a49d15d2a07354307aa642833ac8c1f1221b8c", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_read", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_read/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_read/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_read/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_read/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0dd8e3e62fa4a57db9b413e4720b557fa93f3ee3099d342dc9db5aabffc5f514", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_read/tasks/module_vault_read_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "da29f44ffc011d762ccce49fb56d9f92c211efc2b23e862b06250b53cfa99b4e", + "format": 1 + }, + { + "name": 
"tests/integration/targets/module_vault_read/tasks/module_vault_read_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "64c40f5bdb50e600ff94387a0d9aae3f0df171de2548d5bdfc693a2c7fc7cafd", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_read/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fd716828fe3bf18b1f328708f5a49d15d2a07354307aa642833ac8c1f1221b8c", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_token_create", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_token_create/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_token_create/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9126d021b12eeeb4906402adad6b85a1536f4c3096b9f6864037ebaa5da25a2b", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_token_create/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_token_create/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "18612db702fea282a224794f73509a59be1ffc1916e90cfc55ecfa6a73f6919c", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_token_create/tasks/module_vault_token_create_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c85486c5137bd9d5b4820e08b9c2206db7cbb87fa3af2461322048dff4bdff54", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_token_create/tasks/module_vault_token_create_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "92960ce2f9e01c7d7bc1bc7865ffa941da04f0c6e916ff5fdc1c48b7c61c9008", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_token_create/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fd716828fe3bf18b1f328708f5a49d15d2a07354307aa642833ac8c1f1221b8c", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_write", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_write/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_write/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4711f4181ed51d1fd3c0912702e2e28b8429485a3ded3bbe8088b873a6ff3888", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_write/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_write/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9ba8927d006f6fe47f7b07101814e2ba0380abf275893ae9f23c2f10016c57fa", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_write/tasks/module_vault_write_setup.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b2634aae7862e962b8e3b3d91e4c8139a3665f4594508759dbadb29b533098ac", + "format": 1 + }, + { + "name": "tests/integration/targets/module_vault_write/tasks/module_vault_write_test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2c77fe22b6a2c80eab637289cd32027d3893ecab8f9e9deee8551234a3ded4ae", + "format": 1 + 
}, + { + "name": "tests/integration/targets/module_vault_write/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fd716828fe3bf18b1f328708f5a49d15d2a07354307aa642833ac8c1f1221b8c", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_cert_content", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_cert_content/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_cert_content/defaults/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1a0680caa039fea93d79a7c029902d0bfe92fca93262c7a94d6500da1623b4ec", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_cert_content/files", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_cert_content/files/.gitignore", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "240a3e0d37d2e86b614063f5347eb02d4f99ca6c254de6b82871ff8d95532a7d", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_cert_content/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_cert_content/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e6bedb35975e2420202e837af3950fd4511e3329961155468062619a1a38c29c", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_cert_content/README.md", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "22f4f3c9b0922471eb243107f1c2981e078dda06b80df33a2a25cd652c5bcaae", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_cert_content/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e084a3683ef795d1cdbf5e9b253f2ca1f783ae0d0d6e47e419acbbc4fc80bbfa", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/defaults/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e9186a70864ebe946235117d244026dff8de74ffc6185f470c947fc83aa7e046", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/files", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/files/.output", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/files/.output/.gitignore", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "240a3e0d37d2e86b614063f5347eb02d4f99ca6c254de6b82871ff8d95532a7d", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/files/playbooks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/files/playbooks/vault_docker.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "db75d75550fcb5334f8161ae8b591f145c6604a093247e9149f723bf26682abb", + "format": 1 + }, + { + "name": 
"tests/integration/targets/setup_localenv_docker/files/requirements", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/files/requirements/constraints.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "521caee2728da71f6f8c804ee6a125081b38ac1b07b0acb48d790eeeca722eeb", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/files/requirements/requirements.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e2af5fc6f791738b66b031c46731e8de0976265f02dc51901174af1f1be3b4e3", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/files/requirements/requirements.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "be87a9d501b75b973dc2f4fe10737999d8efab083b9c6110babe06cf1cb3d502", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/tasks/docker.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b96cfb39b0bd4c4d5211ba46f1d627da4fc612d33304cfbf12c7540bb931644e", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5847657fe7ab224b6a780b62359a725f4dd8978c9fbde213bb6dd124d648ab19", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates/mmock", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates/mmock/aws_iam_login_alt_mount.yml.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "317b5333ca14afbdf3b8644f736989c51e00acd8be74e45c0346172ed9959ebe", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates/mmock/aws_iam_login_bad_request.yml.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a1e5b3aa7d0cece8d2903f8940700daa9f16270ba26768efe233a128521e9732", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates/mmock/aws_iam_login_default_mount.yml.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "67d4fa267c88acd0535b6a210173c4c09328d0bd85680680ad8b5c9864bd6b4f", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates/mmock/azure_login_alt_mount.yml.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "28b66797633cef63f3ba52eec1651f9d431cddd7890ae1a1d0af064efe7f6a39", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates/mmock/azure_login_bad_request.yml.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b12b3b3c6c4c610e65dddc4fd329ce4279a5d30e2e7cf3718f5aa89c7ebbe8cd", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates/mmock/azure_login_default_mount.yml.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3e8d33a83d997c6ef3bdf521a4d84b58ba679fb549c87ac5386545c22e2e0d12", + "format": 1 + }, + { + "name": 
"tests/integration/targets/setup_localenv_docker/templates/mmock/ldap_login_alt_mount.yml.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e0e1199213a8f232275836cce867670178da1debbaaf71b33634d3c3fc8060f5", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates/mmock/ldap_login_bad_request.yml.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ae0065389625d436409b87c1ceba34f7c9807a64607d9805d555e607615adda9", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates/mmock/ldap_login_default_mount.yml.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fede7fa23329da2e7aa6355244ddc7eff7a8eea8470636e407029617ef3932a9", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates/mmock/proxy.yml.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "47b130680233fb0cbd00befacae9bbe7392f32d3faebdad11b498f74e192a44f", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates/docker-compose.yml.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4ee4aded02dc766e59599c22e977f266fee2a4e238b5c9cb5b7ccfbd31e089e3", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates/integration_config.yml.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "73398c97802ef294f0805aaf9280ee34af978cc2d8ece24d249a4ed77f8a5024", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/templates/vault_config.hcl.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9c7254a396dc9ac74af1f1fbb0892cab39fba0cec4527e56aa248543455315f2", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/vars", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/vars/local_client.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "36592c2cde448b70070f6774b1dd4296c90637ba8647cc41ab9334ee6b6caed0", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/vars/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ba1d50adc2585030809325a0102576dbb5cb1c5f4c1613bf5f1f4a2ce0bd6cc8", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/README.md", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7ec55f30a70285ad085f1d5657cc57eb74aab7aceaa451da51945c093db42f58", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b6b61217ce2fd2439b7be0f3d4ad5552a4f0ffeb0a23fec50e7b7933e63fb4e7", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_docker/setup.sh", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ce168bb8820943719cf7951e1fe4d2d1fe152298b13ee684273dbd6ed51f33ee", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/defaults/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": 
"e76edf2a977f8207982b1f795e926339aa00ad56f520bb30f227eb0f3aa50e5b", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/files", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/files/.output", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/files/.output/vault_config", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/files/.output/vault_config/cert.pem", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "457bda4f39bada51eba8565292fd97804f6c5e683634edf14b68cff7ff9d2e8a", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/files/.output/vault_config/key.pem", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e5c6ad60034f400e79bce53297d545001f2ef542bb4acd2d582c582a3a57c88a", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/files/playbooks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/files/playbooks/gha.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e3f4b18483da6eadf64e5ed8193d89ba648daa2ac67c61df156e7dbd9a166496", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/files/.gitignore", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "62e40bb603f9363ec6311b250a05015073b96762c8d5922f0feabdc8abc0fd27", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0a2890aea249c0c8fe4a98f4c186874cae98946310d2d210c687d011579a372d", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/templates", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/templates/launch.sh.j2", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "8140290d9e2cb642fa73825d36757eff353dd08fa064d923cd7ad6918df89ef9", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/README.md", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "14a742813519f9c0bcc79008d68833dfa9c6be74f794a87554e086ededeec76f", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d1af30078d09889b4cd75307801e3725159cc14f8a57d26fc1d1abe0a36435dc", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_localenv_gha/setup.sh", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "cee01fa3f5fb94286449ffa1739479a927b6375415d8ff577a1ddc231269d0e5", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure/meta/main.yml", + "ftype": "file", 
+ "chksum_type": "sha256", + "chksum_sha256": "4711f4181ed51d1fd3c0912702e2e28b8429485a3ded3bbe8088b873a6ff3888", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure/tasks/configure.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "01c1515b851b9049a8144d87ff083ccdec7096812a106da9b30f8cce2e4a457c", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f6f74a7189b5acfb583f01bce8062cee9404d2c7686a67d3c112287565839b7f", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure/vars", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure/vars/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "19301a0ad2a403e049142fde1390f7d8156784c10b2219ab0619fdf91ed0a7b4", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure/README.md", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3839692716e48d420a016198f8d8de5ca0e695a988ec1453daf02a97d2971232", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0584bb783019be77fe15aba5347fba524b641155594286366214a4647662c8e8", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure_engine_pki", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure_engine_pki/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure_engine_pki/meta/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4711f4181ed51d1fd3c0912702e2e28b8429485a3ded3bbe8088b873a6ff3888", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure_engine_pki/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure_engine_pki/tasks/configure.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1822e18e219613803554c9395e009666ba920a3f4520f62153800e974b1907f9", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure_engine_pki/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7e519299d20d9793981bfc0fa7624032fec6c7f4c6d4dfb4ebe9741978e73c0e", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure_engine_pki/vars", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure_engine_pki/vars/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0099fc53071306888dd1b1b0fecae25e30e6fbabcc9905f4ab91eb4f9b679620", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure_engine_pki/README.md", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e69967877b7033967b0541a77b4a8ec9231a51633298eff91165b363f06c1497", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_configure_engine_pki/aliases", + 
"ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0584bb783019be77fe15aba5347fba524b641155594286366214a4647662c8e8", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_server_cert", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_server_cert/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_server_cert/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5b26ee7e8a5071d7f6e1f755353627c5fc191e292ea1a8c63d4183cb6e6a5789", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_server_cert/README.md", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "814650980f8e9ea2aabe528247b99f401dc6e928d58794560528c5ff9a7986ae", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_server_cert/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e084a3683ef795d1cdbf5e9b253f2ca1f783ae0d0d6e47e419acbbc4fc80bbfa", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/library", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/library/vault_ci_enable_auth.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b491af9b390e8582dbf748f0296aaec3c499729169e6c1157cfdb78738f63101", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/library/vault_ci_enable_engine.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1af578eec94693d62cab2cf9c08c5b0cec1e5f13bebee6d3a9336d6c6b785dd7", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/library/vault_ci_kv2_destroy_all.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "678fa8916e71659be319196a491dec4c3ddf61163a0706cd2f96c70b8e5a2de8", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/library/vault_ci_kv2_metadata_read.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ee12906ce17c8cbe81aebc23a69ec33b6f5d7776607fbce4544756f2521f7888", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/library/vault_ci_kv_put.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "41dcea25747ef02a9f2ee7b7376d935848734a656feb5aba4c46b9315b63dee9", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/library/vault_ci_policy_put.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "cf52ace89aaf5ce040f3788a3bc5b9e8f00099255027342f73e3c266c62e960e", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/library/vault_ci_read.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5fd56085ed7eae5fea337283e9356a9ad5413671106dc93e2a0e2a642c43a387", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/library/vault_ci_token_create.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "40648679572beb0b849c93fcc1ed691a153913a0046834447c213fa1ae7b07f9", + "format": 1 + }, + { + "name": 
"tests/integration/targets/setup_vault_test_plugins/library/vault_ci_write.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0b62e9f618ecadbfb87e24b1611f4f8b7d15ddc706f8d37283a0e8e97c6498d2", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/library/vault_test_auth.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a7f4633c93b45f7853072bf752887bfd35be177e27d6f5a12fe642c0dfba4c26", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/library/vault_test_connection.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b093a6bfa730b32c2aff34d941c8066ca04bbf4acc51a2ce6023530734614ecc", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/lookup_plugins", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/lookup_plugins/vault_test_auth.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b05277e1806e69931e54925922132b45bf7f8ad65ca57bbbec446826a09245bb", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/lookup_plugins/vault_test_connection.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "acc78b6e0a95ea2d25fcfa9caa2fad35851d84e6455239ec3357f23f074306f2", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/vars", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/vars/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "534251c9911a8c09e3ee5f51539a534b102e06b4448a3ae173c6029a0d0e45c5", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/README.md", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "908e82fca186a13504010c5c0b84fa37c358b5131d225014ebe8864115b8d858", + "format": 1 + }, + { + "name": "tests/integration/targets/setup_vault_test_plugins/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e084a3683ef795d1cdbf5e9b253f2ca1f783ae0d0d6e47e419acbbc4fc80bbfa", + "format": 1 + }, + { + "name": "tests/integration/.ansible-lint", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "982c0b7fe8683e818a27cf59b6a6892d91c277703c81e2db35994f63ef90e7fc", + "format": 1 + }, + { + "name": "tests/integration/integration.cfg", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "67339294dd28f46be1f2fb0e4c3c8b964db8ca3037bff2a9a2efb289b33b7cbb", + "format": 1 + }, + { + "name": "tests/integration/integration_config.yml.sample", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c278972bf182fe5c9023d0bd549b0d53377f151b1f0225c755b4ccd44427cd47", + "format": 1 + }, + { + "name": "tests/integration/requirements.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b46f66680eea1291c232bd2e08db3258aa94ad4a3c5a26750203dc71046810f5", + "format": 1 + }, + { + "name": "tests/unit", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/compat", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/compat/__init__.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "format": 1 + }, + { + "name": 
"tests/unit/compat/builtins.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7163336aa20ba9db9643835a38c25097c8a01d558ca40869b2b4c82af25a009c", + "format": 1 + }, + { + "name": "tests/unit/compat/mock.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d6497f477d48680f5ce57d9a3ae6522206b41a24adbdb132c44e0547730378ef", + "format": 1 + }, + { + "name": "tests/unit/compat/unittest.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5401a046e5ce71fa19b6d905abd0f9bdf816c0c635f7bdda6730b3ef06e67096", + "format": 1 + }, + { + "name": "tests/unit/fixtures", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/fixtures/approle_login_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e4f61385dcc47517369bd63ab153d0cfff45aa6522be913385619be4626ff51e", + "format": 1 + }, + { + "name": "tests/unit/fixtures/approle_secret_id_write_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "db3fdc71a16186503edc4a7dfd3cf744e6654af0fea6acdb46d968496bd9c191", + "format": 1 + }, + { + "name": "tests/unit/fixtures/aws_iam_login_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2a915cbb994cfe0e1a27eba3f538f26456af4106b0a1ad368df0a819cc89193a", + "format": 1 + }, + { + "name": "tests/unit/fixtures/azure_login_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "951bbf75083be047d644e6a74757318159582794c8ba4f35271f07d950dd085a", + "format": 1 + }, + { + "name": "tests/unit/fixtures/cert_login_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "80ebb3d62931e358c6d3bab71ecb14a8e9e6b614ebdd3df9feb5ea4465b3ca83", + "format": 1 + }, + { + "name": "tests/unit/fixtures/jwt_login_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e213c3dbe3d37626928d540eb0de42e748bca4c80013fc3e21160f2da13aa9ec", + "format": 1 + }, + { + "name": "tests/unit/fixtures/kv1_get_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b94a6484dc1cbcda7f4af320fdf756efc2182aaed9494828e269b42fa527b0ac", + "format": 1 + }, + { + "name": "tests/unit/fixtures/kv2_get_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0e09402aafa1ef36d22dd8b3eaedad1f7b7260db7c043660db44e249bdffb36c", + "format": 1 + }, + { + "name": "tests/unit/fixtures/kv2_list_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "dd8c344080f2ea551258193262fb348e1886acab5e9c76271b8d1758f82d5ffc", + "format": 1 + }, + { + "name": "tests/unit/fixtures/ldap_login_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4da5a43a27b5edfea54f89e6a115eaa3ff03983ee4fbf437f14839884e344b03", + "format": 1 + }, + { + "name": "tests/unit/fixtures/lookup-self_with_meta.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7b5f97ac230212a022a647eef60b682aa837474d73060dc51b7d4d6d3fc15aa3", + "format": 1 + }, + { + "name": "tests/unit/fixtures/lookup-self_without_meta.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7fb845f44041844368aa5e0b685ecd0a0252a226836cbd6c4693ed182352cb13", + "format": 1 + }, + { + "name": "tests/unit/fixtures/pki_generate_certificate_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e7a06a7fd0006ac6457547f59aa08e2160a7cb133c081dd0b0d3d494243c0dc7", + "format": 1 + }, + { + "name": 
"tests/unit/fixtures/policy_list_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5cdf215cb185be7ada2af928ac678b11014133f9255c9f6431e83fc786abcc54", + "format": 1 + }, + { + "name": "tests/unit/fixtures/token_create_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2b67d33f1c74f305c363b853524f59d935960ee8f60f6d8c932736c9c315c86f", + "format": 1 + }, + { + "name": "tests/unit/fixtures/userpass_list_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4c402a29e935b6e672bfbac06b30d2627ea5babdf2e767220c11b943afd61c23", + "format": 1 + }, + { + "name": "tests/unit/fixtures/userpass_login_response.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c8325a14998b64b632e55a78504595cef5926142abf74b334f3e309b4385d170", + "format": 1 + }, + { + "name": "tests/unit/fixtures/vault-token", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5d399bd12bf2def4d7c7b3e71fe6e66af3270a0387c1a560adc2c7e69a2486d6", + "format": 1 + }, + { + "name": "tests/unit/plugins", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/filter", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/filter/test_filter_vault_login_token.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "543240818686c85a8a168018d6d3a815579671ac163a17ab0749fbdb3a9b2845", + "format": 1 + }, + { + "name": "tests/unit/plugins/lookup", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/lookup/conftest.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d6c7f8ca5012118e90d797deff5b47138150bd5113a41da4d314450c41d98bd8", + "format": 1 + }, + { + "name": "tests/unit/plugins/lookup/test_hashi_vault.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d4789801a5401f90b1d8724e0515217a7c2ebcc17ca9db71661a1375249402f2", + "format": 1 + }, + { + "name": "tests/unit/plugins/lookup/test_vault_ansible_settings.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5085fad371153c9fec2aec06aabbe1501482b568a7634441c4fe0a3c024919ce", + "format": 1 + }, + { + "name": "tests/unit/plugins/lookup/test_vault_kv1_get.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7cc81d29c5ec443c667d6413151c0dbf16ddeb064f70ff8aeb5d2ab5aa58bd27", + "format": 1 + }, + { + "name": "tests/unit/plugins/lookup/test_vault_kv2_get.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "dce588f189b9ceef4d4e154053a7aa75148ffecbcad5ed8b86fcbfab6356db8c", + "format": 1 + }, + { + "name": "tests/unit/plugins/lookup/test_vault_list.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5f71f7f9ff7010031d4e2ed04db13337be2750cccd28b91639ba98f7ffb52733", + "format": 1 + }, + { + "name": "tests/unit/plugins/lookup/test_vault_login.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e4baa8e6ad79334b80cd81d120019511167c55e893476a4558a5d07905a8b3ae", + "format": 1 + }, + { + "name": "tests/unit/plugins/lookup/test_vault_read.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4aaf6df6a38abdbab3c83596d35efd3e329b1cd8a73c7c8e18b9e3ae528b0b6c", + "format": 1 + }, + { + "name": "tests/unit/plugins/lookup/test_vault_token_create.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": 
"da6e62da0beabc991cebc40e063f861b6b46f98ef501d4c28f17ee7e68e34b05", + "format": 1 + }, + { + "name": "tests/unit/plugins/lookup/test_vault_write.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "db7a4a050e4c80b9ef058ed9c28e06ff7f7c94d0d6fac535455d270e69cd06ed", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/authentication", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/authentication/conftest.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "18727f21eb0e6cee937cfc4933beebbe7cde4e1a858dca446c88ca6527746ca4", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/authentication/test_auth_approle.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ff6e8516f14dd6e122c70973d30e1b9497e2ac72eaf1c351d37cfd121cd7115c", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/authentication/test_auth_aws_iam.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "47a0b6f5339479ccf7ba86344478bb936d468e7cac20fdabe8ea84248d4f086c", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/authentication/test_auth_azure.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e7874e069e88a75c257fb80cf612bf78316ace21e892289ad59981907906dd77", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/authentication/test_auth_cert.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "99ef4f0dfe21d7a58812d2233d346965e2711282f72a06f2ed1e9f679052f4b6", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/authentication/test_auth_jwt.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "45ece6c06ac282ae20fc5fa2eae8f9d5f8af9eb2ca46ee5889361637f958d45d", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/authentication/test_auth_ldap.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f00afb41614f09adb5da141e488bc80c647770342dcb7b73ac2bacb009aaeb68", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/authentication/test_auth_none.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b994683c71c6a274f4b0deb50c8e3ae566f217e4d692675657e1bcd5cdaa6eeb", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/authentication/test_auth_token.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "abac6adc2d23f2368700edac95f9734153379ce62837cacf277653024b7b63b4", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/authentication/test_auth_userpass.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b027c121cd78436740585fc3dc632dee1787cfbaa1bf4d97eba389b8bdd6b26d", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/authentication/test_hashi_vault_auth_method_base.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0c0861f123a4cf0f291f857035d04754b6a547e79e47b1a406e48b4022f3b44b", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/authentication/test_hashi_vault_authenticator.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d8c16bb049e30655356c9eb5a48802d9c7d5ed2177b2730301e9dd05982cacc5", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/option_adapter", + "ftype": "dir", + "chksum_type": null, + 
"chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/option_adapter/conftest.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2483bdada5a58ffe453a28e0f1e118a5f286d55b0dbba6f1796349778ef4014c", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/option_adapter/test_hashi_vault_option_adapter.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "df8f8ddc9ecf159214128c2987b6186805af8ea631cd855169a702db8617e767", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/test_hashi_vault_connection_options.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d40bcd481379315d4362afb0cc0e3b735b024ec5c85fb54055b05374da52fcd6", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/test_hashi_vault_helper.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "02ca0e5fb0d77d7de7a86b1fe7ce9bce5f3d33a71bf64c1785376f50200aee89", + "format": 1 + }, + { + "name": "tests/unit/plugins/module_utils/test_hashi_vault_option_group_base.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b96a6e75abc5b37507b89bab63ca4bd3db8a0e01bfe14477b0ea9deab1402ac1", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/conftest.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c43bad6fcdf0452b1fc5d3d1f42c030ead02c240139e2da976067488d5bddb46", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_vault_kv1_get.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "edc875961f391485aa0e4da2883ea5c2dab31a653fd368c71dc5f58ae3e5be08", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_vault_kv2_delete.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "534eebf481a4270089eb09fae7a20b04206224c55e3747344996b88969290fa2", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_vault_kv2_get.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4f80d138e1c713f2979201cc7781cc109aca9e1ac52c28ca432db3dd198d5154", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_vault_kv2_write.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "819ad968420e3c41ffc7e500c6c4f0cd313615c4fa9d4533e0b85a8de2006f8e", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_vault_list.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7df75c81c14e76af4b8efbeeeb363c14f0c5ae7d7657c3a6e0230d723bc6d641", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_vault_login.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "84f5fadfb29cff822bd19fdf350ad0e173eca8201246536e89ee69e53182807c", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_vault_pki_generate_certificate.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2c7ffc3b60b172c704c2947bfd212c92ea2a2e4bef1b71a3967c3e022c9879e6", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_vault_read.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c2b55f98343e54bd534fd56dadd49df7f213c54c456066e6b74abe25d9e0319f", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_vault_token_create.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4372d614d93cf553638d5a1ff7b8b788b7e25f9145a6bf524b9cfd38f1e93f4d", + "format": 1 + }, + { + 
"name": "tests/unit/plugins/modules/test_vault_write.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b723e07376ec9c56bd5070e0be49c19d6e6542aa46b9b4e3491170bb1af6ce4c", + "format": 1 + }, + { + "name": "tests/unit/plugins/plugin_utils", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/plugin_utils/authentication", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/plugin_utils/authentication/conftest.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7eaacca9465aa4ab541a88254fe6700ece427d3e9fd832c2733bfd71e30c6899", + "format": 1 + }, + { + "name": "tests/unit/plugins/plugin_utils/authentication/test_auth_token.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b48581be86aa632e7580301a0d3d62d06219b3d24b44a94b6c4ca73e23e6f1fc", + "format": 1 + }, + { + "name": "tests/unit/plugins/plugin_utils/base", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/plugin_utils/base/test_hashi_vault_lookup_base.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b38f37e9cb846e77a00e6b1dcf63db09ffde02460e02ec278317e091a950dd42", + "format": 1 + }, + { + "name": "tests/unit/plugins/plugin_utils/base/test_hashi_vault_plugin.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6715e97a94bd36d796e229f86baf13d2622740f663f0a0fa2764bfd9ff094f99", + "format": 1 + }, + { + "name": "tests/unit/plugins/plugin_utils/option_adapter", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/plugin_utils/option_adapter/conftest.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "692431478c133aedf9e66f939cb63ecc7fc66a58191ace5569759a3af40ed5c5", + "format": 1 + }, + { + "name": "tests/unit/plugins/plugin_utils/option_adapter/test_hashi_vault_option_adapter.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "525504b9d635e74afe7e24efbb2431e9cd77ceb6b27b1ba1ebf2da92c8ac5901", + "format": 1 + }, + { + "name": "tests/unit/plugins/plugin_utils/test_hashi_vault_common_stringify.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "30a555505f23377a8850ec84ad5df84a192dd24ba5774fb473fa67f97dac381b", + "format": 1 + }, + { + "name": "tests/unit/plugins/plugin_utils/test_hashi_vault_helper.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "14488b16acbb0cf0d991a84f4fe1805fb0ac5f0cfa4529cf1b57b7b448adc907", + "format": 1 + }, + { + "name": "tests/unit/conftest.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e29931074c13b6d08f944adba9ce6fbaf0e2b8585a9bceeac51cde6c99ab83e1", + "format": 1 + }, + { + "name": "tests/unit/requirements.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1a7e15b893287335a87621a5738d46855524ee40b7e54c7af18fa567c85f30a5", + "format": 1 + }, + { + "name": "tests/utils", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/utils/constraints.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6587434a23557810c3378386918925a2bc372399682483bb1ee0c33001a30544", + "format": 1 + }, + { + "name": "tests/config.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f5174fec8c0d86b9970fd83767ffc181f4d6ef25aaad4a1f12f9d73ad38a23de", + "format": 1 + }, + { + 
"name": ".git-blame-ignore-revs", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "559c96731959d3d5e516775f2e4329064991505ffefb2f3c84d3b1f814270f90", + "format": 1 + }, + { + "name": "CHANGELOG.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "79bab511c63a4f5ffdffbf0deb76f4b6aed0db672e46dc34413030f308cbba4a", + "format": 1 + }, + { + "name": "LICENSE", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986", + "format": 1 + }, + { + "name": "README.md", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "44caa26d837f0ae09a1f6d7f97fca696d26261ea4d94d6d30ccf58bb37864ae3", + "format": 1 + }, + { + "name": "codecov.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "18addb1569ed97c0c9c729e38eaef7ede87ad95c6d4f557bab5c8784fba7b6b7", + "format": 1 + } + ], + "format": 1 +}
\ No newline at end of file diff --git a/ansible_collections/community/hashi_vault/LICENSE b/ansible_collections/community/hashi_vault/LICENSE new file mode 100644 index 000000000..f288702d2 --- /dev/null +++ b/ansible_collections/community/hashi_vault/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. 
+States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<https://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<https://www.gnu.org/licenses/why-not-lgpl.html>. diff --git a/ansible_collections/community/hashi_vault/LICENSES/BSD-2-Clause.txt b/ansible_collections/community/hashi_vault/LICENSES/BSD-2-Clause.txt new file mode 100644 index 000000000..e34763968 --- /dev/null +++ b/ansible_collections/community/hashi_vault/LICENSES/BSD-2-Clause.txt @@ -0,0 +1,7 @@ +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/ansible_collections/community/hashi_vault/LICENSES/GPL-3.0-or-later.txt b/ansible_collections/community/hashi_vault/LICENSES/GPL-3.0-or-later.txt new file mode 100644 index 000000000..f288702d2 --- /dev/null +++ b/ansible_collections/community/hashi_vault/LICENSES/GPL-3.0-or-later.txt @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. 
+ + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. 
+ + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
+ + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<https://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<https://www.gnu.org/licenses/why-not-lgpl.html>. 
diff --git a/ansible_collections/community/hashi_vault/MANIFEST.json b/ansible_collections/community/hashi_vault/MANIFEST.json new file mode 100644 index 000000000..3845b05b1 --- /dev/null +++ b/ansible_collections/community/hashi_vault/MANIFEST.json @@ -0,0 +1,39 @@ +{ + "collection_info": { + "namespace": "community", + "name": "hashi_vault", + "version": "4.2.1", + "authors": [ + "Julie Davila (@juliedavila) <julie(at)davila.io>", + "Brian Scholer (@briantist)" + ], + "readme": "README.md", + "tags": [ + "vault", + "hashicorp", + "secret", + "secrets", + "password", + "passwords" + ], + "description": "Plugins related to HashiCorp Vault", + "license": [ + "GPL-3.0-or-later", + "BSD-2-Clause" + ], + "license_file": null, + "dependencies": {}, + "repository": "https://github.com/ansible-collections/community.hashi_vault", + "documentation": "https://docs.ansible.com/ansible/devel/collections/community/hashi_vault", + "homepage": "https://github.com/ansible-collections/community.hashi_vault", + "issues": "https://github.com/ansible-collections/community.hashi_vault/issues" + }, + "file_manifest_file": { + "name": "FILES.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a3546d2747fa04f54d5d67aee79aeb8d80598df269a6f484ba9ff2a0fc3895e1", + "format": 1 + }, + "format": 1 +}
\ No newline at end of file
diff --git a/ansible_collections/community/hashi_vault/README.md b/ansible_collections/community/hashi_vault/README.md
new file mode 100644
index 000000000..c8cf2b82c
--- /dev/null
+++ b/ansible_collections/community/hashi_vault/README.md
@@ -0,0 +1,130 @@
+# community.hashi_vault Collection
+<!-- Add CI and code coverage badges here. Samples included below. -->
+[![CI](https://github.com/ansible-collections/community.hashi_vault/workflows/CI/badge.svg?event=push)](https://github.com/ansible-collections/community.hashi_vault/actions) [![Codecov](https://img.shields.io/codecov/c/github/ansible-collections/community.hashi_vault)](https://codecov.io/gh/ansible-collections/community.hashi_vault)
+
+<!-- Describe the collection and why a user would want to use it. What does the collection do? -->
+## Collection Documentation
+
+Browsing the [**latest** collection documentation](https://docs.ansible.com/ansible/latest/collections/community/hashi_vault) will show docs for the _latest version released in the Ansible package_, not the latest version of the collection released on Galaxy.
+
+Browsing the [**devel** collection documentation](https://docs.ansible.com/ansible/devel/collections/community/hashi_vault) shows docs for the _latest version released on Galaxy_.
+
+We also separately publish [**latest commit** collection documentation](https://ansible-collections.github.io/community.hashi_vault/branch/main/), which shows docs for the _latest commit in the `main` branch_.
+
+If you use the Ansible package and don't update collections independently, use **latest**; if you install or update this collection directly from Galaxy, use **devel**. If you are looking to contribute, use **latest commit**.
+## Tested with Ansible
+
+* 2.11
+* 2.12
+* 2.13
+* 2.14
+* 2.15
+* devel (latest development commit)
+
+See [the CI configuration](https://github.com/ansible-collections/community.hashi_vault/blob/main/.github/workflows/ansible-test.yml) for the most accurate testing information.
+<!-- List the versions of Ansible the collection has been tested with. Must match what is in galaxy.yml. -->
+
+## Tested with Vault
+
+We currently test against the latest patch version of each of the latest two minor versions of the latest major version of Vault. Put another way, we test against version `X.{Y|Y-1}.Z`. For example, as of this writing, Vault is on major version `1`, with the latest two minors being `8` and `7`. So we'll test Vault `1.8.Z` and `1.7.Z`, where `Z` is the latest patch within those versions.
+
+We do not test against any versions of Vault with major version `0` or against pre-release/release candidate (RC) versions.
+
+If/when a new major version of Vault is released, we'll revisit which and how many versions to test against.
+
+The decision of which version(s) of Vault to test against is still somewhat in flux, as we try to balance wide testing with CI execution time and resources.
+
+See [the CI configuration](https://github.com/ansible-collections/community.hashi_vault/blob/main/.github/workflows/ansible-test.yml) for the most accurate testing information.
+
+## Python Requirements
+
+**Python 2.6, 2.7, and 3.5 are not supported in version `2.0.0` or later of the collection.**
+
+Currently we support and test against Python versions:
+* 3.6
+* 3.7
+* 3.8
+* 3.9
+* 3.10
+* 3.11
+
+Note that for controller-side plugins, only the Python versions supported by the Ansible controller are supported (for example, you cannot use Python 3.7 with ansible-core 2.12). 
+
+## External requirements
+
+The `hvac` Python library is required for this collection. [For full requirements and details, see the collection's User Guide](https://docs.ansible.com/ansible/devel/collections/community/hashi_vault/docsite/user_guide.html#requirements).
+
+## Included content
+
+[See the list of included content in the docsite](https://docs.ansible.com/ansible/devel/collections/community/hashi_vault/#plugin-index).
+
+## Using this collection
+
+<!--Include some quick examples that cover the most common use cases for your collection content. -->
+
+See [Ansible Using collections](https://docs.ansible.com/ansible/latest/user_guide/collections_using.html) for more details. A brief, illustrative usage sketch also appears at the end of this README.
+
+## Contributing to this collection
+
+See the contributor guide in the [**devel** collection documentation](https://docs.ansible.com/ansible/devel/collections/community/hashi_vault).
+
+<!--Describe how the community can contribute to your collection. At a minimum, include how and where users can create issues to report problems or request features for this collection. List contribution requirements, including preferred workflows and necessary testing, so you can benefit from community PRs. If you are following general Ansible contributor guidelines, you can link to - [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html). -->
+
+## Releasing this collection (for maintainers)
+[Follow the instructions for releasing small collections in the Ansible community wiki](https://github.com/ansible/community/wiki/ReleasingCollections#releasing-without-release-branches-for-smaller-collections).
+
+Once the new collection is published and the Zuul job is finished, add a release in GitHub by [manually running the `GitHub Release` workflow](https://github.com/ansible-collections/community.hashi_vault/actions/workflows/github-release.yml). You'll need to enter the version number, which should exactly match the tag used to release the collection.
+
+## Release notes
+
+See the [rendered changelog](https://ansible-collections.github.io/community.hashi_vault/branch/main/collections/community/hashi_vault/docsite/CHANGELOG.html) or the [raw generated changelog](https://github.com/ansible-collections/community.hashi_vault/tree/main/CHANGELOG.rst).
+
+## FAQ
+
+### **Q:** Why not have a single collection of HashiCorp products instead of one just for Vault?
+
+**A:** This was considered when the `hashi_vault` plugin was first moved from `community.general` to this collection. There are several reasons behind this:
+
+* The other known HashiCorp content at that time (covering Consul, Nomad, Terraform, etc.) does not share implementation or testing with Vault content.
+* The maintainers are also different. This being a community-supported collection means separate maintainers are more likely to focus on goals that make sense for their particular plugins and user base.
+* The HashiCorp products serve different goals, and even when used together, they have their own APIs and interfaces that don't really have anything in common from the point of view of the Ansible codebase as a consumer.
+* It would complicate testing. One of the primary goals of moving to a new collection was the ability to increase the scope of Vault-focused testing without having to balance the impact on unrelated components.
+* It makes for a smaller package for consumers, one that can hopefully release more quickly. 
+
+### **Q:** Why is the collection named `community.hashi_vault` instead of `community.vault` or `community.hashicorp_vault` or `hashicorp.vault` or any number of other names?
+
+**A:** This too was considered during formation. In the end, `hashi_vault` is a compromise among various concerns.
+
+* `hashicorp.vault` looks great, but it implies the collection is supported by HashiCorp (which it is not), and it doesn't follow the convention of denoting community-supported namespaces with the `community.` prefix.
+* `community.vault` looks great at first, but "Vault" is a very general and overloaded term, and in Ansible the first "Vault" one thinks of is [Ansible Vault](https://docs.ansible.com/ansible/latest/user_guide/vault.html). So in the naming, and even in the future of this collection and its content, we have to be mindful of avoiding and removing ambiguities between these products (and other Vaults out there).
+* `community.hashicorp_vault` is descriptive and unambiguous but is unfortunately quite long.
+* `community.hashicorp` would be good for a collection that aims to contain community-supported content related to all HashiCorp products, but this collection is focused only on Vault (see the question above).
+* `community.hashicorp.vault` (or any other 3-component name): not supported (also long).
+* `community.hashi_vault` isn't perfect, but it carries over the established name of the existing plugin and isn't as long as `hashicorp_vault`.
+
+
+## Roadmap
+
+<!-- Optional. Include the roadmap for this collection, and the proposed release/versioning strategy so users can anticipate the upgrade/update cycle. -->
+
+## More information
+
+<!-- List out where the user can find additional information, such as working group meeting times, slack/IRC channels, or documentation for the product this collection automates. At a minimum, link to: -->
+
+- [Ansible Collection overview](https://github.com/ansible-collections/overview)
+- [Ansible User guide](https://docs.ansible.com/ansible/latest/user_guide/index.html)
+- [Ansible Developer guide](https://docs.ansible.com/ansible/latest/dev_guide/index.html)
+- [Ansible Collections Checklist](https://github.com/ansible-collections/overview/blob/master/collection_requirements.rst)
+- [Ansible Community code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html)
+- [The Bullhorn (the Ansible Contributor newsletter)](https://us19.campaign-archive.com/home/?u=56d874e027110e35dea0e03c1&id=d6635f5420)
+- [Changes impacting Contributors](https://github.com/ansible-collections/overview/issues/45)
+
+## Licensing
+
+<!-- Include the appropriate license information here and a pointer to the full licensing details. If the collection contains modules migrated from the ansible/ansible repo, you must use the same license that existed in the ansible/ansible repo. See the GNU license example below. -->
+
+GNU General Public License v3.0 or later.
+
+See [LICENSE](https://www.gnu.org/licenses/gpl-3.0.txt) for the full text.
+
+Parts of the collection are licensed under the [BSD-2-Clause](https://opensource.org/licenses/BSD-2-Clause) license. 
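+
+## Quick usage sketch
+
+As a brief illustration of the "Using this collection" section above, the play below reads a secret with the collection's `vault_kv2_get` lookup. This is a minimal sketch: the Vault address, token source, mount point, and secret path are placeholder assumptions, and the collection (`ansible-galaxy collection install community.hashi_vault`) and its `hvac` dependency (`pip install hvac`) must be installed first.
+
+```yaml
+- name: Demonstrate community.hashi_vault (illustrative values only)
+  hosts: localhost
+  gather_facts: false
+  vars:
+    # Connection and auth options can also be set via environment variables
+    # such as ANSIBLE_HASHI_VAULT_ADDR and ANSIBLE_HASHI_VAULT_TOKEN.
+    ansible_hashi_vault_url: https://vault.example.com:8200
+    ansible_hashi_vault_auth_method: token
+    ansible_hashi_vault_token: "{{ lookup('ansible.builtin.env', 'VAULT_TOKEN') }}"
+  tasks:
+    - name: Read a secret from the KV version 2 engine
+      ansible.builtin.debug:
+        msg: "{{ lookup('community.hashi_vault.vault_kv2_get', 'hello', engine_mount_point='secret') }}"
+```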
diff --git a/ansible_collections/community/hashi_vault/changelogs/changelog.yaml b/ansible_collections/community/hashi_vault/changelogs/changelog.yaml new file mode 100644 index 000000000..cd982ec0e --- /dev/null +++ b/ansible_collections/community/hashi_vault/changelogs/changelog.yaml @@ -0,0 +1,646 @@ +ancestor: null +releases: + 0.1.0: + changes: + release_summary: 'Our first release matches the ``hashi_vault`` lookup functionality + provided by ``community.general`` version ``1.3.0``. + + ' + fragments: + - 0.1.0.yml + release_date: '2020-12-02' + 0.2.0: + changes: + bugfixes: + - hashi_vault - ``mount_point`` parameter did not work with ``aws_iam_login`` + auth method (https://github.com/ansible-collections/community.hashi_vault/issues/7) + - hashi_vault - fallback logic for handling deprecated style of auth in hvac + was not implemented correctly (https://github.com/ansible-collections/community.hashi_vault/pull/33). + - hashi_vault - parameter ``mount_point`` does not work with JWT auth (https://github.com/ansible-collections/community.hashi_vault/issues/29). + - hashi_vault - tokens without ``lookup-self`` ability can't be used because + of validation (https://github.com/ansible-collections/community.hashi_vault/issues/18). + deprecated_features: + - hashi_vault - ``VAULT_ADDR`` environment variable for option ``url`` will + have its precedence lowered in 1.0.0; use ``ANSIBLE_HASHI_VAULT_ADDR`` to + intentionally override a config value (https://github.com/ansible-collections/community.hashi_vault/issues/8). + - hashi_vault - ``VAULT_AUTH_METHOD`` environment variable for option ``auth_method`` + will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_AUTH_METHOD`` instead + (https://github.com/ansible-collections/community.hashi_vault/issues/17). + - hashi_vault - ``VAULT_ROLE_ID`` environment variable for option ``role_id`` + will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_ROLE_ID`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/20). + - hashi_vault - ``VAULT_SECRET_ID`` environment variable for option ``secret_id`` + will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_SECRET_ID`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/20). + - hashi_vault - ``VAULT_TOKEN_FILE`` environment variable for option ``token_file`` + will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_TOKEN_FILE`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/15). + - hashi_vault - ``VAULT_TOKEN_PATH`` environment variable for option ``token_path`` + will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_TOKEN_PATH`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/15). + minor_changes: + - Add optional ``aws_iam_server_id`` parameter as the value for ``X-Vault-AWS-IAM-Server-ID`` + header (https://github.com/ansible-collections/community.hashi_vault/pull/27). + - hashi_vault - ``ANSIBLE_HASHI_VAULT_ADDR`` environment variable added for + option ``url`` (https://github.com/ansible-collections/community.hashi_vault/issues/8). + - hashi_vault - ``ANSIBLE_HASHI_VAULT_AUTH_METHOD`` environment variable added + for option ``auth_method`` (https://github.com/ansible-collections/community.hashi_vault/issues/17). + - hashi_vault - ``ANSIBLE_HASHI_VAULT_ROLE_ID`` environment variable added for + option ``role_id`` (https://github.com/ansible-collections/community.hashi_vault/issues/20). 
+ - hashi_vault - ``ANSIBLE_HASHI_VAULT_SECRET_ID`` environment variable added + for option ``secret_id`` (https://github.com/ansible-collections/community.hashi_vault/issues/20). + - hashi_vault - ``ANSIBLE_HASHI_VAULT_TOKEN_FILE`` environment variable added + for option ``token_file`` (https://github.com/ansible-collections/community.hashi_vault/issues/15). + - hashi_vault - ``ANSIBLE_HASHI_VAULT_TOKEN_PATH`` environment variable added + for option ``token_path`` (https://github.com/ansible-collections/community.hashi_vault/issues/15). + - hashi_vault - ``namespace`` parameter can be specified in INI or via env vars + ``ANSIBLE_HASHI_VAULT_NAMESPACE`` (new) and ``VAULT_NAMESPACE`` (lower preference) (https://github.com/ansible-collections/community.hashi_vault/issues/14). + - hashi_vault - ``token`` parameter can now be specified via ``ANSIBLE_HASHI_VAULT_TOKEN`` + as well as via ``VAULT_TOKEN`` (the latter with lower preference) (https://github.com/ansible-collections/community.hashi_vault/issues/16). + - hashi_vault - add ``token_validate`` option to control token validation (https://github.com/ansible-collections/community.hashi_vault/pull/24). + - hashi_vault - uses new AppRole method in hvac 0.10.6 with fallback to deprecated + method with warning (https://github.com/ansible-collections/community.hashi_vault/pull/33). + release_summary: 'Several backwards-compatible bugfixes and enhancements in + this release. + + Some environment variables are deprecated and have standardized replacements.' + fragments: + - 0.2.0.yml + - 22-hashi_vault-aws_iam_login-mount_point.yml + - 24-hashi_vault-token_validation.yml + - 25-non-breaking-env-parameter-changes.yml + - 27-add-hashi_vault-header_value-param.yml + - 31-jwt-mount_point.yml + - 33-approle-deprecation.yml + - 35-env-var-deprecations.yml + release_date: '2020-12-24' + 1.0.0: + changes: + breaking_changes: + - hashi_vault - the ``VAULT_ADDR`` environment variable is now checked last + for the ``url`` parameter. For details on which use cases are impacted, see + (https://github.com/ansible-collections/community.hashi_vault/issues/8). + release_summary: Our first major release contains a single breaking change that + will affect only a small subset of users. No functionality is removed. See + the details in the changelog to determine if you're affected and if so how + to transition to remediate. + fragments: + - 1.0.0.yml + - 41-lower-url-env-precedence.yml + release_date: '2020-12-30' + 1.1.0: + changes: + minor_changes: + - hashi_vault - add ``proxies`` option (https://github.com/ansible-collections/community.hashi_vault/pull/50). + release_summary: This release contains a new ``proxies`` option for the ``hashi_vault`` + lookup. + fragments: + - 1.1.0.yml + - 50-add-proxies-option.yml + release_date: '2021-02-08' + 1.1.1: + changes: + bugfixes: + - hashi_vault - restore use of ``VAULT_ADDR`` environment variable as a low + preference env var (https://github.com/ansible-collections/community.hashi_vault/pull/61). + release_summary: 'This bugfix release restores the use of the ``VAULT_ADDR`` + environment variable for setting the ``url`` option. + + See the PR linked from the changelog entry for details and workarounds if + you cannot upgrade.' + fragments: + - 1.1.1.yml + - 41-fix-vault-addr.yml + release_date: '2021-02-24' + 1.1.2: + changes: + release_summary: This release contains the same functionality as 1.1.1. The + only change is to mark some code as internal to the collection. 
If you are
+ already using 1.1.1 as an end user, you do not need to update.
+ fragments:
+ - 1.1.2.yml
+ release_date: '2021-03-02'
+ 1.1.3:
+ changes:
+ bugfixes:
+ - hashi_vault - userpass authentication did not work with hvac 0.9.6 or higher
+ (https://github.com/ansible-collections/community.hashi_vault/pull/68).
+ release_summary: This release fixes a bug with ``userpass`` authentication and
+ ``hvac`` versions 0.9.6 and higher.
+ fragments:
+ - 1.1.3.yml
+ - 68-fix-userpass-auth.yml
+ release_date: '2021-03-19'
+ 1.2.0:
+ changes:
+ deprecated_features:
+ - hashi_vault collection - support for Python 2 will be dropped in version ``2.0.0``
+ of ``community.hashi_vault`` (https://github.com/ansible-collections/community.hashi_vault/issues/81).
+ minor_changes:
+ - hashi_vault lookup - add ``ANSIBLE_HASHI_VAULT_CA_CERT`` env var (with ``VAULT_CACERT``
+ low-precedence fallback) for ``ca_cert`` option (https://github.com/ansible-collections/community.hashi_vault/pull/97).
+ - hashi_vault lookup - add ``ANSIBLE_HASHI_VAULT_PASSWORD`` env var and ``ansible_hashi_vault_password``
+ ansible var for ``password`` option (https://github.com/ansible-collections/community.hashi_vault/pull/96).
+ - hashi_vault lookup - add ``ANSIBLE_HASHI_VAULT_USERNAME`` env var and ``ansible_hashi_vault_username``
+ ansible var for ``username`` option (https://github.com/ansible-collections/community.hashi_vault/pull/96).
+ - hashi_vault lookup - add ``ansible_hashi_vault_auth_method`` Ansible vars
+ entry to the ``auth_method`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+ - hashi_vault lookup - add ``ansible_hashi_vault_ca_cert`` ansible var for ``ca_cert``
+ option (https://github.com/ansible-collections/community.hashi_vault/pull/97).
+ - hashi_vault lookup - add ``ansible_hashi_vault_namespace`` Ansible vars entry
+ to the ``namespace`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+ - hashi_vault lookup - add ``ansible_hashi_vault_proxies`` Ansible vars entry
+ to the ``proxies`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+ - hashi_vault lookup - add ``ansible_hashi_vault_role_id`` Ansible vars entry
+ to the ``role_id`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+ - hashi_vault lookup - add ``ansible_hashi_vault_secret_id`` Ansible vars entry
+ to the ``secret_id`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+ - hashi_vault lookup - add ``ansible_hashi_vault_token_file`` Ansible vars entry
+ to the ``token_file`` option (https://github.com/ansible-collections/community.hashi_vault/pull/95).
+ - hashi_vault lookup - add ``ansible_hashi_vault_token_path`` Ansible vars entry
+ to the ``token_path`` option (https://github.com/ansible-collections/community.hashi_vault/pull/95).
+ - hashi_vault lookup - add ``ansible_hashi_vault_token_validate`` Ansible vars
+ entry to the ``token_validate`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+ - hashi_vault lookup - add ``ansible_hashi_vault_token`` Ansible vars entry
+ to the ``token`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+ - hashi_vault lookup - add ``ansible_hashi_vault_url`` and ``ansible_hashi_vault_addr``
+ Ansible vars entries to the ``url`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86). 
+
+ - hashi_vault lookup - add ``ansible_hashi_vault_validate_certs`` Ansible vars
+ entry to the ``validate_certs`` option (https://github.com/ansible-collections/community.hashi_vault/pull/95).
+ - hashi_vault lookup - add ``ca_cert`` INI config file key for the ``ca_cert`` option
+ (https://github.com/ansible-collections/community.hashi_vault/pull/97).
+ - hashi_vault lookup - add ``none`` auth type which allows for passive auth
+ via a Vault agent (https://github.com/ansible-collections/community.hashi_vault/pull/80).
+ release_summary: 'This release brings several new ways of accessing options,
+ like using Ansible vars, and adding new environment variables and INI config
+ entries.
+
+ A special ``none`` auth type is also added, for working with certain Vault
+ Agent configurations.
+
+ This release also announces the deprecation of Python 2 support in version
+ ``2.0.0`` of the collection.'
+ fragments:
+ - 1.2.0.yml
+ - 80-add-none-auth-type.yml
+ - 81-deprecating-python2.yml
+ - 86-add-vars-options.yml
+ - 95-more-vars-options.yml
+ - 96-userpass-vars-env.yml
+ - 97-ca_cert-env-and-vars.yml
+ release_date: '2021-06-19'
+ 1.3.0:
+ changes:
+ minor_changes:
+ - hashi_vault lookup - add ``retries`` and ``retry_action`` to enable built-in
+ retry on failure (https://github.com/ansible-collections/community.hashi_vault/pull/71).
+ - hashi_vault lookup - add ``timeout`` option to control connection timeouts
+ (https://github.com/ansible-collections/community.hashi_vault/pull/100).
+ release_summary: This release adds two connection-based options for controlling
+ timeouts and retrying failed Vault requests.
+ fragments:
+ - 1.3.0.yml
+ - 100-add-timeout-option.yml
+ - 71-add-retries.yml
+ release_date: '2021-06-28'
+ 1.3.1:
+ changes:
+ release_summary: This release fixes an error in the documentation. No functionality
+ is changed, so it's not necessary to upgrade from ``1.3.0``.
+ fragments:
+ - 1.3.1.yml
+ release_date: '2021-06-30'
+ 1.3.2:
+ changes:
+ deprecated_features:
+ - hashi_vault collection - support for Python 3.5 will be dropped in version
+ ``2.0.0`` of ``community.hashi_vault`` (https://github.com/ansible-collections/community.hashi_vault/issues/81).
+ minor_changes:
+ - hashi_vault collection - add ``execution-environment.yml`` and a Python requirements
+ file to better support ``ansible-builder`` (https://github.com/ansible-collections/community.hashi_vault/pull/105).
+ release_summary: 'This release adds requirements detection support for Ansible
+ Execution Environments. It also updates and adds new guides in our `collection
+ docsite <https://docs.ansible.com/ansible/devel/collections/community/hashi_vault>`_.
+
+ This release also announces the dropping of Python 3.5 support in version
+ ``2.0.0`` of the collection, alongside the previous announcement dropping
+ Python 2.x in ``2.0.0``.'
+ fragments:
+ - 1.3.2.yml
+ - 105-support-ansible-builder.yml
+ - 107-deprecating-python-35.yml
+ release_date: '2021-07-20'
+ 1.4.0:
+ changes:
+ bugfixes:
+ - aws_iam_login auth - the ``aws_security_token`` option was not used, causing
+ assumed role credentials to fail (https://github.com/ansible-collections/community.hashi_vault/issues/160).
+ - hashi_vault collection - a fallback import supporting the ``retries`` option
+ for ``urllib3`` via ``requests.packages.urllib3`` was not correctly formed
+ (https://github.com/ansible-collections/community.hashi_vault/issues/116). 
+ - hashi_vault collection - unhandled exception with ``token`` auth when ``token_file`` + exists but is a directory (https://github.com/ansible-collections/community.hashi_vault/issues/152). + deprecated_features: + - lookup hashi_vault - the ``[lookup_hashi_vault]`` section in the ``ansible.cfg`` + file is deprecated and will be removed in collection version ``3.0.0``. Instead, + the section ``[hashi_vault_collection]`` can be used, which will apply to + all plugins in the collection going forward (https://github.com/ansible-collections/community.hashi_vault/pull/144). + minor_changes: + - community.hashi_vault collection - add cert auth method (https://github.com/ansible-collections/community.hashi_vault/pull/159). + release_summary: 'This release includes bugfixes, a new auth method (``cert``), + and the first new content since the collection''s formation, the ``vault_read`` + module and lookup plugin. + + We''re also announcing the deprecation of the ``[lookup_hashi_vault]`` INI + section (which will continue working up until its removal only for the ``hashi_vault`` + lookup), to be replaced by the ``[hashi_vault_collection]`` section that will + apply to all plugins in the collection.' + fragments: + - 1.4.0.yml + - 113-retry-fallback.yml + - 144-deprecate-lookup-ini.yml + - 154-token_file must be a file.yml + - 159-add-cert-auth.yml + - 161-aws-sts-token.yml + modules: + - description: Perform a read operation against HashiCorp Vault + name: vault_read + namespace: '' + plugins: + lookup: + - description: Perform a read operation against HashiCorp Vault + name: vault_read + namespace: null + release_date: '2021-10-25' + 1.4.1: + changes: + bugfixes: + - aws_iam_login auth method - fix incorrect use of ``boto3``/``botocore`` that + prevented proper loading of AWS IAM role credentials (https://github.com/ansible-collections/community.hashi_vault/issues/167). + release_summary: This release contains a bugfix for ``aws_iam_login`` authentication. + fragments: + - 1.4.1.yml + - 168-aws_auth-boto-bug.yml + release_date: '2021-10-28' + 1.5.0: + changes: + minor_changes: + - add the ``community.hashi_vault.vault`` action group (https://github.com/ansible-collections/community.hashi_vault/pull/172). + - auth methods - Add support for configuring the ``mount_point`` auth method + option in plugins via the ``ANSIBLE_HASHI_VAULT_MOUNT_POINT`` environment + variable, ``ansible_hashi_vault_mount_point`` ansible variable, or ``mount_point`` + INI section (https://github.com/ansible-collections/community.hashi_vault/pull/171). + release_summary: 'This release includes a new action group for use with ``module_defaults``, + and additional ways of specifying the ``mount_point`` option for plugins. + + This will be the last ``1.x`` release.' + fragments: + - 1.5.0.yml + - 172-action_group.yml + - pr-171-envvar-for-mount-point.yaml + release_date: '2021-11-05' + 2.0.0: + changes: + breaking_changes: + - connection options - there is no longer a default value for the ``url`` option + (the Vault address), so a value must be supplied (https://github.com/ansible-collections/community.hashi_vault/issues/83). + release_summary: 'Version 2.0.0 of the collection drops support for Python 2 + & Python 3.5, making Python 3.6 the minimum supported version. + + Some deprecated features and settings have been removed as well.' + removed_features: + - drop support for Python 2 and Python 3.5 (https://github.com/ansible-collections/community.hashi_vault/issues/81). 
+ - 'support for the following deprecated environment variables has been removed: + ``VAULT_AUTH_METHOD``, ``VAULT_TOKEN_PATH``, ``VAULT_TOKEN_FILE``, ``VAULT_ROLE_ID``, + ``VAULT_SECRET_ID`` (https://github.com/ansible-collections/community.hashi_vault/pull/173).' + fragments: + - 173-deprecated-env-vars.yml + - 176-url-is-required.yml + - 177-drop-py2-3.5.yml + - 2.0.0.yml + release_date: '2021-11-06' + 2.1.0: + changes: + deprecated_features: + - Support for Ansible 2.9 and ansible-base 2.10 is deprecated, and will be removed + in the next major release (community.hashi_vault 3.0.0) next spring (https://github.com/ansible-community/community-topics/issues/50, + https://github.com/ansible-collections/community.hashi_vault/issues/189). + - aws_iam_login auth method - the ``aws_iam_login`` method has been renamed + to ``aws_iam``. The old name will be removed in collection version ``3.0.0``. + Until then both names will work, and a warning will be displayed when using + the old name (https://github.com/ansible-collections/community.hashi_vault/pull/193). + release_summary: The most important change in this release is renaming the ``aws_iam_login`` + auth method to ``aws_iam`` and deprecating the old name. This release also + announces the deprecation of Ansible 2.9 and ansible-base 2.10 support in + 3.0.0. + removed_features: + - the "legacy" integration test setup has been removed; this does not affect + end users and is only relevant to contributors (https://github.com/ansible-collections/community.hashi_vault/pull/191). + fragments: + - 190-deprecate-ansible-2.9-2.10.yml + - 191-remove-legacy-integration.yml + - 193-rename-aws-iam-auth.yml + - 2.1.0.yml + release_date: '2021-12-03' + 2.2.0: + changes: + minor_changes: + - The Filter guide has been added to the collection's docsite. + release_summary: This release contains a new lookup/module combo for logging + in to Vault, and includes our first filter plugin. + fragments: + - 199-vault_login-vault_login_token.yml + - 2.2.0.yml + modules: + - description: Perform a login operation against HashiCorp Vault + name: vault_login + namespace: '' + plugins: + filter: + - description: Extracts the client token from a Vault login response + name: vault_login_token + namespace: null + lookup: + - description: Perform a login operation against HashiCorp Vault + name: vault_login + namespace: null + release_date: '2022-01-05' + 2.3.0: + changes: + release_summary: This release contains new plugins and modules for creating + tokens and for generating certificates with Vault's PKI secrets engine. + fragments: + - 2.3.0.yml + modules: + - description: Generates a new set of credentials (private key and certificate) + using HashiCorp Vault PKI + name: vault_pki_generate_certificate + namespace: '' + - description: Create a HashiCorp Vault token + name: vault_token_create + namespace: '' + plugins: + lookup: + - description: Create a HashiCorp Vault token + name: vault_token_create + namespace: null + release_date: '2022-02-15' + 2.4.0: + changes: + release_summary: Our first content for writing to Vault is now live. 
+ fragments: + - 2.4.0.yml + modules: + - description: Perform a write operation against HashiCorp Vault + name: vault_write + namespace: '' + plugins: + lookup: + - description: Perform a write operation against HashiCorp Vault + name: vault_write + namespace: null + release_date: '2022-03-31' + 2.5.0: + changes: + deprecated_features: + - token_validate options - the shared auth option ``token_validate`` will change + its default from ``True`` to ``False`` in community.hashi_vault version 4.0.0. + The ``vault_login`` lookup and module will keep the default value of ``True`` + (https://github.com/ansible-collections/community.hashi_vault/issues/248). + minor_changes: + - vault_login module & lookup - no friendly error message was given when ``hvac`` + was missing (https://github.com/ansible-collections/community.hashi_vault/issues/257). + - vault_pki_certificate - add ``vault_pki_certificate`` to the ``community.hashi_vault.vault`` + action group (https://github.com/ansible-collections/community.hashi_vault/issues/251). + - vault_read module & lookup - no friendly error message was given when ``hvac`` + was missing (https://github.com/ansible-collections/community.hashi_vault/issues/257). + - vault_token_create - add ``vault_token_create`` to the ``community.hashi_vault.vault`` + action group (https://github.com/ansible-collections/community.hashi_vault/issues/251). + - vault_token_create module & lookup - no friendly error message was given when + ``hvac`` was missing (https://github.com/ansible-collections/community.hashi_vault/issues/257). + - vault_write - add ``vault_write`` to the ``community.hashi_vault.vault`` action + group (https://github.com/ansible-collections/community.hashi_vault/issues/251). + release_summary: 'This release finally contains dedicated KV plugins and modules, + and an exciting new lookup to help use plugin values in module calls. + + With that, we also have a guide in the collection docsite for migrating away + from the ``hashi_vault`` lookup toward dedicated content. + + We are also announcing that the ``token_validate`` option will change its + default value in version 4.0.0. + + This is the last planned release before 3.0.0. See the porting guide for breaking + changes and removed features in the next version.' + fragments: + - 2.5.0.yml + - 246-action_group.yml + - 258-token_validate-default.yml + - 259-hvac-checks.yml + modules: + - description: Get a secret from HashiCorp Vault's KV version 1 secret store + name: vault_kv1_get + namespace: '' + - description: Get a secret from HashiCorp Vault's KV version 2 secret store + name: vault_kv2_get + namespace: '' + plugins: + lookup: + - description: Returns plugin settings (options) + name: vault_ansible_settings + namespace: null + - description: Get a secret from HashiCorp Vault's KV version 1 secret store + name: vault_kv1_get + namespace: null + - description: Get a secret from HashiCorp Vault's KV version 2 secret store + name: vault_kv2_get + namespace: null + release_date: '2022-05-11' + 3.0.0: + changes: + deprecated_features: + - token_validate options - the shared auth option ``token_validate`` will change + its default from ``true`` to ``false`` in community.hashi_vault version 4.0.0. + The ``vault_login`` lookup and module will keep the default value of ``true`` + (https://github.com/ansible-collections/community.hashi_vault/issues/248). + release_summary: 'Version 3.0.0 of ``community.hashi_vault`` drops support for + Ansible 2.9 and ansible-base 2.10. 
+ + Several deprecated features have been removed. See the changelog for the full + list.' + removed_features: + - aws_iam auth - the deprecated alias ``aws_iam_login`` for the ``aws_iam`` + value of the ``auth_method`` option has been removed (https://github.com/ansible-collections/community.hashi_vault/issues/194). + - community.hashi_vault collection - support for Ansible 2.9 and ansible-base + 2.10 has been removed (https://github.com/ansible-collections/community.hashi_vault/issues/189). + - hashi_vault lookup - the deprecated ``[lookup_hashi_vault]`` INI config section + has been removed in favor of the collection-wide ``[hashi_vault_collection]`` + section (https://github.com/ansible-collections/community.hashi_vault/issues/179). + fragments: + - 179-remove-lookup_hashi_vault-ini.yml + - 189-remove-ansible-2_9-2_10-support.yml + - 194-remove-aws_iam_login.yml + - 248-token_validate-change-default.yml + - 3.0.0.yml + release_date: '2022-05-21' + 3.1.0: + changes: + bugfixes: + - Add SPDX license headers to individual files (https://github.com/ansible-collections/community.hashi_vault/pull/282). + - Add missing ``BSD-2-Clause.txt`` file for BSD licensed content (https://github.com/ansible-collections/community.hashi_vault/issues/275). + - Use the correct GPL license for plugin_utils (https://github.com/ansible-collections/community.hashi_vault/issues/276). + deprecated_features: + - vault_kv2_get lookup - the ``engine_mount_point option`` in the ``vault_kv2_get`` + lookup only will change its default from ``kv`` to ``secret`` in community.hashi_vault + version 4.0.0 (https://github.com/ansible-collections/community.hashi_vault/issues/279). + release_summary: 'A default value that was set incorrectly will be corrected + in ``4.0.0``. + + A deprecation warning will be shown until then if the value is not specified + explicitly. + + This version also includes some fixes and improvements to the licensing in + the collection, which does not affect any functionality.' + fragments: + - 279-incorrect-kv2-lookup-default.yml + - 3.1.0.yml + - licensing.yml + release_date: '2022-07-17' + 3.2.0: + changes: + bugfixes: + - community.hashi_vault plugins - tokens will be cast to a string type before + being sent to ``hvac`` to prevent errors in ``requests`` when values are ``AnsibleUnsafe`` + (https://github.com/ansible-collections/community.hashi_vault/issues/289). + - modules - fix a "variable used before assignment" that cannot be reached but + causes sanity test failures (https://github.com/ansible-collections/community.hashi_vault/issues/296). + minor_changes: + - community.hashi_vault collection - add support for ``azure`` auth method, + for Azure service principal, managed identity, or plain JWT access token (https://github.com/ansible-collections/community.hashi_vault/issues/293). + - community.hashi_vault retries - `HTTP status code 412 <https://www.vaultproject.io/api-docs#412>`__ + has been added to the default list of codes to be retried, for the new `Server + Side Consistent Token feature <https://www.vaultproject.io/docs/faq/ssct#q-is-there-anything-else-i-need-to-consider-to-achieve-consistency-besides-upgrading-to-vault-1-10>`__ + in Vault Enterprise (https://github.com/ansible-collections/community.hashi_vault/issues/290). + release_summary: This release brings support for the ``azure`` auth method, + adds ``412`` to the default list of HTTP status codes to be retried, and fixes + a bug that causes failures in token auth with ``requests>=2.28.0``. 
+ fragments: + - 289-handle-unsafe-strings.yml + - 290-retry-http-412.yml + - 293-support-azure-auth-method.yml + - 296-use-before-assignment.yml + - 3.2.0.yml + release_date: '2022-08-21' + 3.3.0: + changes: + minor_changes: + - vault_token_create - creation or orphan tokens uses ``hvac``'s new v1 method + for creating orphans, or falls back to the v0 method if needed (https://github.com/ansible-collections/community.hashi_vault/issues/301). + release_summary: 'With the release of ``hvac`` version ``1.0.0``, we needed + to update ``vault_token_create``''s support for orphan tokens. + + The collection''s changelog is now viewable in the Ansible documentation site.' + fragments: + - 3.3.0.yml + - 301-orphan-token-handling.yml + release_date: '2022-09-19' + 3.3.1: + changes: + release_summary: No functional changes in this release, this provides updated + filter documentation for the public docsite. + fragments: + - 3.3.1.yml + release_date: '2022-09-25' + 3.4.0: + changes: + bugfixes: + - connection options - the ``namespace`` connection option will be forced into + a string to ensure cmpatibility with recent ``requests`` versions (https://github.com/ansible-collections/community.hashi_vault/issues/309). + minor_changes: + - vault_pki_generate_certificate - the documentation has been updated to match + the argspec for the default values of options ``alt_names``, ``ip_sans``, + ``other_sans``, and ``uri_sans`` (https://github.com/ansible-collections/community.hashi_vault/pull/318). + release_summary: 'This release includes a new module, fixes (another) ``requests`` + header issue, and updates some inaccurate documentation. + + This is the last planned release before v4.0.0.' + fragments: + - 3.4.0.yml + - 309-stringify-namespace.yml + - 318-pki-argspec-doc-mismatch.yml + modules: + - description: Delete one or more versions of a secret from HashiCorp Vault's + KV version 2 secret store + name: vault_kv2_delete + namespace: '' + release_date: '2022-11-03' + 4.0.0: + changes: + breaking_changes: + - auth - the default value for ``token_validate`` has changed from ``true`` + to ``false``, as previously announced (https://github.com/ansible-collections/community.hashi_vault/issues/248). + - vault_kv2_get lookup - as previously announced, the default value for ``engine_mount_point`` + in the ``vault_kv2_get`` lookup has changed from ``kv`` to ``secret`` (https://github.com/ansible-collections/community.hashi_vault/issues/279). + minor_changes: + - modules - all modules now document their action group and support for check + mode in their attributes documentation (https://github.com/ansible-collections/community.hashi_vault/issues/197). + release_summary: The next major version of the collection includes previously + announced breaking changes to some default values, and improvements to module + documentation with attributes that describe the use of action groups and check + mode support. + fragments: + - 197-module-attributes.yml + - 248-token_validate-default.yml + - 279-vault_kv2_get-lookup-mount-default.yml + - 4.0.0.yml + release_date: '2022-11-05' + 4.1.0: + changes: + deprecated_features: + - ansible-core - support for ``ansible-core`` versions ``2.11`` and ``2.12`` + will be dropped in collection version ``5.0.0``, making ``2.13`` the minimum + supported version of ``ansible-core`` (https://github.com/ansible-collections/community.hashi_vault/issues/340). 
+ - hvac - the minimum version of ``hvac`` to be supported in collection version + ``5.0.0`` will be at least ``1.0.2``; this minimum may be raised before ``5.0.0`` + is released, so please subscribe to the linked issue and look out for new + notices in the changelog (https://github.com/ansible-collections/community.hashi_vault/issues/324). + release_summary: 'This release brings new generic ``vault_list`` plugins from + a new contributor! + + There are also some deprecation notices for the next major version, and some + updates to documentation attributes.' + fragments: + - 324-deprecate-hvac.yml + - 325-fix attributes.yml + - 340-deprecate-core-211-212.yml + - 4.1.0.yml + modules: + - description: Perform a list operation against HashiCorp Vault + name: vault_list + namespace: '' + plugins: + lookup: + - description: Perform a list operation against HashiCorp Vault + name: vault_list + namespace: null + release_date: '2023-01-18' + 4.2.0: + changes: + bugfixes: + - hashi_vault lookup - a term string with duplicate options would silently use + the last value. The lookup now shows a warning on option duplication (https://github.com/ansible-collections/community.hashi_vault/issues/349). + deprecated_features: + - hashi_vault lookup - in ``v5.0.0`` duplicate term string options will raise + an exception instead of showing a warning (https://github.com/ansible-collections/community.hashi_vault/issues/356). + release_summary: This release contains a new module for KVv2 writes, and a new + warning for duplicated term string options in the ``hashi_vault`` lookup. + fragments: + - 350-raise-error-on-option-duplication-in-term-string.yml + - 4.2.0.yml + modules: + - description: Perform a write operation against a KVv2 secret in HashiCorp Vault + name: vault_kv2_write + namespace: '' + release_date: '2023-03-26' + 4.2.1: + changes: + release_summary: This patch version updates the documentation for the ``vault_kv2_write`` + module. There are no functional changes. 
+ fragments: + - 4.2.1.yml + release_date: '2023-04-27' diff --git a/ansible_collections/community/hashi_vault/changelogs/config.yaml b/ansible_collections/community/hashi_vault/changelogs/config.yaml new file mode 100644 index 000000000..f7f950db0 --- /dev/null +++ b/ansible_collections/community/hashi_vault/changelogs/config.yaml @@ -0,0 +1,29 @@ +changelog_filename_template: ../CHANGELOG.rst +changelog_filename_version_depth: 0 +changes_file: changelog.yaml +changes_format: combined +keep_fragments: false +mention_ancestor: true +new_plugins_after_name: removed_features +notesdir: fragments +prelude_section_name: release_summary +prelude_section_title: Release Summary +sections: +- - major_changes + - Major Changes +- - minor_changes + - Minor Changes +- - breaking_changes + - Breaking Changes / Porting Guide +- - deprecated_features + - Deprecated Features +- - removed_features + - Removed Features (previously deprecated) +- - security_fixes + - Security Fixes +- - bugfixes + - Bugfixes +- - known_issues + - Known Issues +title: community.hashi_vault +trivial_section_name: trivial diff --git a/ansible_collections/community/hashi_vault/changelogs/fragments/.keep b/ansible_collections/community/hashi_vault/changelogs/fragments/.keep new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/ansible_collections/community/hashi_vault/changelogs/fragments/.keep diff --git a/ansible_collections/community/hashi_vault/codecov.yml b/ansible_collections/community/hashi_vault/codecov.yml new file mode 100644 index 000000000..1f0977656 --- /dev/null +++ b/ansible_collections/community/hashi_vault/codecov.yml @@ -0,0 +1,115 @@ +--- +ignore: + - tests/unit/compat/* + - tests/unit/**/conftest.py + - tests/unit/conftest.py + +fixes: + - "ansible_collections/community/hashi_vault/::" + +coverage: + status: + patch: + default: + target: 100% + +flags: + target_filter_vault_login_token: + paths: + - plugins/filter/vault_login_token.py + + target_module_vault_kv1_get: + paths: + - plugins/modules/vault_kv1_get.py + + target_module_vault_kv2_get: + paths: + - plugins/modules/vault_kv2_get.py + + target_module_vault_list: + paths: + - plugins/modules/vault_list.py + + target_module_vault_login: + paths: + - plugins/modules/vault_login.py + + target_module_vault_read: + paths: + - plugins/modules/vault_read.py + + target_module_vault_token_create: + paths: + - plugins/modules/vault_token_create.py + + target_module_vault_pki_generate_certificate: + paths: + - plugins/modules/vault_pki_generate_certificate + + target_lookup_vault_kv1_get: + paths: + - plugins/lookup/vault_kv1_get.py + + target_lookup_vault_kv2_get: + paths: + - plugins/lookup/vault_kv2_get.py + + target_lookup_vault_list: + paths: + - plugins/lookup/vault_list.py + + target_lookup_vault_login: + paths: + - plugins/lookup/vault_login.py + + target_lookup_vault_read: + paths: + - plugins/lookup/vault_read.py + + target_lookup_vault_token_create: + paths: + - plugins/lookup/vault_token_create.py + + target_lookup_hashi_vault: + paths: + - plugins/lookup/hashi_vault.py + + target_auth_approle: + paths: + - plugins/module_utils/_auth_method_approle.py + + target_auth_aws_iam: + paths: + - plugins/module_utils/_auth_method_aws_iam.py + + target_auth_azure: + paths: + - plugins/module_utils/_auth_method_azure.py + + target_auth_cert: + paths: + - plugins/module_utils/_auth_method_cert.py + + target_auth_jwt: + paths: + - plugins/module_utils/_auth_method_jwt.py + + target_auth_ldap: + paths: + - plugins/module_utils/_auth_method_ldap.py + + 
target_auth_none: + paths: + - plugins/module_utils/_auth_method_none.py + + target_auth_token: + paths: + - plugins/module_utils/_auth_method_token.py + + target_auth_userpass: + paths: + - plugins/module_utils/_auth_method_userpass.py + + target_connection_options: + paths: + - plugins/module_utils/_connection_options.py diff --git a/ansible_collections/community/hashi_vault/docs/docsite/extra-docs.yml b/ansible_collections/community/hashi_vault/docs/docsite/extra-docs.yml new file mode 100644 index 000000000..82e329fcf --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/docsite/extra-docs.yml @@ -0,0 +1,14 @@ +--- +sections: + - title: Changelog + toctree: + - CHANGELOG + - title: Guides + toctree: + - filter_guide + - user_guide + - migration_hashi_vault_lookup + - about_hashi_vault_lookup + - lookup_guide + - contributor_guide + - localenv_developer_guide diff --git a/ansible_collections/community/hashi_vault/docs/docsite/links.yml b/ansible_collections/community/hashi_vault/docs/docsite/links.yml new file mode 100644 index 000000000..a0d100880 --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/docsite/links.yml @@ -0,0 +1,41 @@ +--- +# based on https://github.com/ansible-collections/collection_template/blob/main/docs/docsite/links.yml +# +# This will make sure that plugin and module documentation gets Edit on GitHub links +# that allow users to directly create a PR for this plugin or module in GitHub's UI. +# Remove this section if the collection repository is not on GitHub, or if you do not want this +# functionality for your collection. +edit_on_github: + repository: ansible-collections/community.hashi_vault + branch: main + # If your collection root (the directory containing galaxy.yml) does not coincide with your + # repository's root, you have to specify the path to the collection root here. For example, + # if the collection root is in a subdirectory ansible_collections/community/REPO_NAME + # in your repository, you have to set path_prefix to 'ansible_collections/community/REPO_NAME'. + path_prefix: '' + +# Here you can add arbitrary extra links. Please keep the number of links down to a +# minimum! Also please keep the description short, since this will be the text put on +# a button. +# +# Also note that some links are automatically added from information in galaxy.yml. +# The following are automatically added: +# 1. A link to the issue tracker (if `issues` is specified); +# 2. A link to the homepage (if `homepage` is specified and does not equal the +# `documentation` or `repository` link); +# 3. A link to the collection's repository (if `repository` is specified). + +extra_links: + - description: Discussion, Q&A, troubleshooting + url: https://github.com/ansible-collections/community.hashi_vault/discussions + +# Specify communication channels for your collection. We suggest to not specify more +# than one place for communication per communication tool to avoid confusion. 
+communication: + matrix_rooms: + - topic: General usage and support questions + room: '#users:ansible.im' + irc_channels: + - topic: General usage and support questions + network: Libera + channel: '#ansible' diff --git a/ansible_collections/community/hashi_vault/docs/docsite/rst/CHANGELOG.rst b/ansible_collections/community/hashi_vault/docs/docsite/rst/CHANGELOG.rst new file mode 100644 index 000000000..5223d4a97 --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/docsite/rst/CHANGELOG.rst @@ -0,0 +1,604 @@ +=================================== +community.hashi_vault Release Notes +=================================== + +.. contents:: Topics + + +v4.2.1 +====== + +Release Summary +--------------- + +This patch version updates the documentation for the ``vault_kv2_write`` module. There are no functional changes. + +v4.2.0 +====== + +Release Summary +--------------- + +This release contains a new module for KVv2 writes, and a new warning for duplicated term string options in the ``hashi_vault`` lookup. + +Deprecated Features +------------------- + +- hashi_vault lookup - in ``v5.0.0`` duplicate term string options will raise an exception instead of showing a warning (https://github.com/ansible-collections/community.hashi_vault/issues/356). + +Bugfixes +-------- + +- hashi_vault lookup - a term string with duplicate options would silently use the last value. The lookup now shows a warning on option duplication (https://github.com/ansible-collections/community.hashi_vault/issues/349). + +New Modules +----------- + +- vault_kv2_write - Perform a write operation against a KVv2 secret in HashiCorp Vault + +v4.1.0 +====== + +Release Summary +--------------- + +This release brings new generic ``vault_list`` plugins from a new contributor! +There are also some deprecation notices for the next major version, and some updates to documentation attributes. + +Deprecated Features +------------------- + +- ansible-core - support for ``ansible-core`` versions ``2.11`` and ``2.12`` will be dropped in collection version ``5.0.0``, making ``2.13`` the minimum supported version of ``ansible-core`` (https://github.com/ansible-collections/community.hashi_vault/issues/340). +- hvac - the minimum version of ``hvac`` to be supported in collection version ``5.0.0`` will be at least ``1.0.2``; this minimum may be raised before ``5.0.0`` is released, so please subscribe to the linked issue and look out for new notices in the changelog (https://github.com/ansible-collections/community.hashi_vault/issues/324). + +New Plugins +----------- + +Lookup +~~~~~~ + +- vault_list - Perform a list operation against HashiCorp Vault + +New Modules +----------- + +- vault_list - Perform a list operation against HashiCorp Vault + +v4.0.0 +====== + +Release Summary +--------------- + +The next major version of the collection includes previously announced breaking changes to some default values, and improvements to module documentation with attributes that describe the use of action groups and check mode support. + +Minor Changes +------------- + +- modules - all modules now document their action group and support for check mode in their attributes documentation (https://github.com/ansible-collections/community.hashi_vault/issues/197). + +Breaking Changes / Porting Guide +-------------------------------- + +- auth - the default value for ``token_validate`` has changed from ``true`` to ``false``, as previously announced (https://github.com/ansible-collections/community.hashi_vault/issues/248). 
+- vault_kv2_get lookup - as previously announced, the default value for ``engine_mount_point`` in the ``vault_kv2_get`` lookup has changed from ``kv`` to ``secret`` (https://github.com/ansible-collections/community.hashi_vault/issues/279).
+
+v3.4.0
+======
+
+Release Summary
+---------------
+
+This release includes a new module, fixes (another) ``requests`` header issue, and updates some inaccurate documentation.
+This is the last planned release before v4.0.0.
+
+Minor Changes
+-------------
+
+- vault_pki_generate_certificate - the documentation has been updated to match the argspec for the default values of options ``alt_names``, ``ip_sans``, ``other_sans``, and ``uri_sans`` (https://github.com/ansible-collections/community.hashi_vault/pull/318).
+
+Bugfixes
+--------
+
+- connection options - the ``namespace`` connection option will be forced into a string to ensure compatibility with recent ``requests`` versions (https://github.com/ansible-collections/community.hashi_vault/issues/309).
+
+New Modules
+-----------
+
+- vault_kv2_delete - Delete one or more versions of a secret from HashiCorp Vault's KV version 2 secret store
+
+v3.3.1
+======
+
+Release Summary
+---------------
+
+No functional changes in this release; this provides updated filter documentation for the public docsite.
+
+v3.3.0
+======
+
+Release Summary
+---------------
+
+With the release of ``hvac`` version ``1.0.0``, we needed to update ``vault_token_create``'s support for orphan tokens.
+The collection's changelog is now viewable in the Ansible documentation site.
+
+Minor Changes
+-------------
+
+- vault_token_create - creation of orphan tokens uses ``hvac``'s new v1 method for creating orphans, or falls back to the v0 method if needed (https://github.com/ansible-collections/community.hashi_vault/issues/301).
+
+v3.2.0
+======
+
+Release Summary
+---------------
+
+This release brings support for the ``azure`` auth method, adds ``412`` to the default list of HTTP status codes to be retried, and fixes a bug that causes failures in token auth with ``requests>=2.28.0``.
+
+Minor Changes
+-------------
+
+- community.hashi_vault collection - add support for ``azure`` auth method, for Azure service principal, managed identity, or plain JWT access token (https://github.com/ansible-collections/community.hashi_vault/issues/293).
+- community.hashi_vault retries - `HTTP status code 412 <https://www.vaultproject.io/api-docs#412>`__ has been added to the default list of codes to be retried, for the new `Server Side Consistent Token feature <https://www.vaultproject.io/docs/faq/ssct#q-is-there-anything-else-i-need-to-consider-to-achieve-consistency-besides-upgrading-to-vault-1-10>`__ in Vault Enterprise (https://github.com/ansible-collections/community.hashi_vault/issues/290).
+
+Bugfixes
+--------
+
+- community.hashi_vault plugins - tokens will be cast to a string type before being sent to ``hvac`` to prevent errors in ``requests`` when values are ``AnsibleUnsafe`` (https://github.com/ansible-collections/community.hashi_vault/issues/289).
+- modules - fix a "variable used before assignment" that cannot be reached but causes sanity test failures (https://github.com/ansible-collections/community.hashi_vault/issues/296).
+
+v3.1.0
+======
+
+Release Summary
+---------------
+
+A default value that was set incorrectly will be corrected in ``4.0.0``.
+A deprecation warning will be shown until then if the value is not specified explicitly.
+This version also includes some fixes and improvements to the licensing in the collection, which does not affect any functionality.
+
+Deprecated Features
+-------------------
+
+- vault_kv2_get lookup - the ``engine_mount_point`` option in the ``vault_kv2_get`` lookup only will change its default from ``kv`` to ``secret`` in community.hashi_vault version 4.0.0 (https://github.com/ansible-collections/community.hashi_vault/issues/279).
+
+Bugfixes
+--------
+
+- Add SPDX license headers to individual files (https://github.com/ansible-collections/community.hashi_vault/pull/282).
+- Add missing ``BSD-2-Clause.txt`` file for BSD licensed content (https://github.com/ansible-collections/community.hashi_vault/issues/275).
+- Use the correct GPL license for plugin_utils (https://github.com/ansible-collections/community.hashi_vault/issues/276).
+
+v3.0.0
+======
+
+Release Summary
+---------------
+
+Version 3.0.0 of ``community.hashi_vault`` drops support for Ansible 2.9 and ansible-base 2.10.
+Several deprecated features have been removed. See the changelog for the full list.
+
+Deprecated Features
+-------------------
+
+- token_validate options - the shared auth option ``token_validate`` will change its default from ``true`` to ``false`` in community.hashi_vault version 4.0.0. The ``vault_login`` lookup and module will keep the default value of ``true`` (https://github.com/ansible-collections/community.hashi_vault/issues/248).
+
+Removed Features (previously deprecated)
+----------------------------------------
+
+- aws_iam auth - the deprecated alias ``aws_iam_login`` for the ``aws_iam`` value of the ``auth_method`` option has been removed (https://github.com/ansible-collections/community.hashi_vault/issues/194).
+- community.hashi_vault collection - support for Ansible 2.9 and ansible-base 2.10 has been removed (https://github.com/ansible-collections/community.hashi_vault/issues/189).
+- hashi_vault lookup - the deprecated ``[lookup_hashi_vault]`` INI config section has been removed in favor of the collection-wide ``[hashi_vault_collection]`` section (https://github.com/ansible-collections/community.hashi_vault/issues/179).
+
+v2.5.0
+======
+
+Release Summary
+---------------
+
+This release finally contains dedicated KV plugins and modules, and an exciting new lookup to help use plugin values in module calls.
+With that, we also have a guide in the collection docsite for migrating away from the ``hashi_vault`` lookup toward dedicated content.
+We are also announcing that the ``token_validate`` option will change its default value in version 4.0.0.
+This is the last planned release before 3.0.0. See the porting guide for breaking changes and removed features in the next version.
+
+Minor Changes
+-------------
+
+- vault_login module & lookup - no friendly error message was given when ``hvac`` was missing (https://github.com/ansible-collections/community.hashi_vault/issues/257).
+- vault_pki_certificate - add ``vault_pki_certificate`` to the ``community.hashi_vault.vault`` action group (https://github.com/ansible-collections/community.hashi_vault/issues/251).
+- vault_read module & lookup - no friendly error message was given when ``hvac`` was missing (https://github.com/ansible-collections/community.hashi_vault/issues/257).
+- vault_token_create - add ``vault_token_create`` to the ``community.hashi_vault.vault`` action group (https://github.com/ansible-collections/community.hashi_vault/issues/251).
+- vault_token_create module & lookup - no friendly error message was given when ``hvac`` was missing (https://github.com/ansible-collections/community.hashi_vault/issues/257). +- vault_write - add ``vault_write`` to the ``community.hashi_vault.vault`` action group (https://github.com/ansible-collections/community.hashi_vault/issues/251). + +Deprecated Features +------------------- + +- token_validate options - the shared auth option ``token_validate`` will change its default from ``True`` to ``False`` in community.hashi_vault version 4.0.0. The ``vault_login`` lookup and module will keep the default value of ``True`` (https://github.com/ansible-collections/community.hashi_vault/issues/248). + +New Plugins +----------- + +Lookup +~~~~~~ + +- vault_ansible_settings - Returns plugin settings (options) +- vault_kv1_get - Get a secret from HashiCorp Vault's KV version 1 secret store +- vault_kv2_get - Get a secret from HashiCorp Vault's KV version 2 secret store + +New Modules +----------- + +- vault_kv1_get - Get a secret from HashiCorp Vault's KV version 1 secret store +- vault_kv2_get - Get a secret from HashiCorp Vault's KV version 2 secret store + +v2.4.0 +====== + +Release Summary +--------------- + +Our first content for writing to Vault is now live. + +New Plugins +----------- + +Lookup +~~~~~~ + +- vault_write - Perform a write operation against HashiCorp Vault + +New Modules +----------- + +- vault_write - Perform a write operation against HashiCorp Vault + +v2.3.0 +====== + +Release Summary +--------------- + +This release contains new plugins and modules for creating tokens and for generating certificates with Vault's PKI secrets engine. + +New Plugins +----------- + +Lookup +~~~~~~ + +- vault_token_create - Create a HashiCorp Vault token + +New Modules +----------- + +- vault_pki_generate_certificate - Generates a new set of credentials (private key and certificate) using HashiCorp Vault PKI +- vault_token_create - Create a HashiCorp Vault token + +v2.2.0 +====== + +Release Summary +--------------- + +This release contains a new lookup/module combo for logging in to Vault, and includes our first filter plugin. + +Minor Changes +------------- + +- The Filter guide has been added to the collection's docsite. + +New Plugins +----------- + +Filter +~~~~~~ + +- vault_login_token - Extracts the client token from a Vault login response + +Lookup +~~~~~~ + +- vault_login - Perform a login operation against HashiCorp Vault + +New Modules +----------- + +- vault_login - Perform a login operation against HashiCorp Vault + +v2.1.0 +====== + +Release Summary +--------------- + +The most important change in this release is renaming the ``aws_iam_login`` auth method to ``aws_iam`` and deprecating the old name. This release also announces the deprecation of Ansible 2.9 and ansible-base 2.10 support in 3.0.0. + +Deprecated Features +------------------- + +- Support for Ansible 2.9 and ansible-base 2.10 is deprecated, and will be removed in the next major release (community.hashi_vault 3.0.0) next spring (https://github.com/ansible-community/community-topics/issues/50, https://github.com/ansible-collections/community.hashi_vault/issues/189). +- aws_iam_login auth method - the ``aws_iam_login`` method has been renamed to ``aws_iam``. The old name will be removed in collection version ``3.0.0``. Until then both names will work, and a warning will be displayed when using the old name (https://github.com/ansible-collections/community.hashi_vault/pull/193). 
+ +Removed Features (previously deprecated) +---------------------------------------- + +- the "legacy" integration test setup has been removed; this does not affect end users and is only relevant to contributors (https://github.com/ansible-collections/community.hashi_vault/pull/191). + +v2.0.0 +====== + +Release Summary +--------------- + +Version 2.0.0 of the collection drops support for Python 2 & Python 3.5, making Python 3.6 the minimum supported version. +Some deprecated features and settings have been removed as well. + +Breaking Changes / Porting Guide +-------------------------------- + +- connection options - there is no longer a default value for the ``url`` option (the Vault address), so a value must be supplied (https://github.com/ansible-collections/community.hashi_vault/issues/83). + +Removed Features (previously deprecated) +---------------------------------------- + +- drop support for Python 2 and Python 3.5 (https://github.com/ansible-collections/community.hashi_vault/issues/81). +- support for the following deprecated environment variables has been removed: ``VAULT_AUTH_METHOD``, ``VAULT_TOKEN_PATH``, ``VAULT_TOKEN_FILE``, ``VAULT_ROLE_ID``, ``VAULT_SECRET_ID`` (https://github.com/ansible-collections/community.hashi_vault/pull/173). + +v1.5.0 +====== + +Release Summary +--------------- + +This release includes a new action group for use with ``module_defaults``, and additional ways of specifying the ``mount_point`` option for plugins. +This will be the last ``1.x`` release. + +Minor Changes +------------- + +- add the ``community.hashi_vault.vault`` action group (https://github.com/ansible-collections/community.hashi_vault/pull/172). +- auth methods - Add support for configuring the ``mount_point`` auth method option in plugins via the ``ANSIBLE_HASHI_VAULT_MOUNT_POINT`` environment variable, ``ansible_hashi_vault_mount_point`` ansible variable, or ``mount_point`` INI section (https://github.com/ansible-collections/community.hashi_vault/pull/171). + +v1.4.1 +====== + +Release Summary +--------------- + +This release contains a bugfix for ``aws_iam_login`` authentication. + +Bugfixes +-------- + +- aws_iam_login auth method - fix incorrect use of ``boto3``/``botocore`` that prevented proper loading of AWS IAM role credentials (https://github.com/ansible-collections/community.hashi_vault/issues/167). + +v1.4.0 +====== + +Release Summary +--------------- + +This release includes bugfixes, a new auth method (``cert``), and the first new content since the collection's formation, the ``vault_read`` module and lookup plugin. +We're also announcing the deprecation of the ``[lookup_hashi_vault]`` INI section (which will continue working up until its removal only for the ``hashi_vault`` lookup), to be replaced by the ``[hashi_vault_collection]`` section that will apply to all plugins in the collection. + +Minor Changes +------------- + +- community.hashi_vault collection - add cert auth method (https://github.com/ansible-collections/community.hashi_vault/pull/159). + +Deprecated Features +------------------- + +- lookup hashi_vault - the ``[lookup_hashi_vault]`` section in the ``ansible.cfg`` file is deprecated and will be removed in collection version ``3.0.0``. Instead, the section ``[hashi_vault_collection]`` can be used, which will apply to all plugins in the collection going forward (https://github.com/ansible-collections/community.hashi_vault/pull/144). 
+
+Bugfixes
+--------
+
+- aws_iam_login auth - the ``aws_security_token`` option was not used, causing assumed role credentials to fail (https://github.com/ansible-collections/community.hashi_vault/issues/160).
+- hashi_vault collection - a fallback import supporting the ``retries`` option for ``urllib3`` via ``requests.packages.urllib3`` was not correctly formed (https://github.com/ansible-collections/community.hashi_vault/issues/116).
+- hashi_vault collection - unhandled exception with ``token`` auth when ``token_file`` exists but is a directory (https://github.com/ansible-collections/community.hashi_vault/issues/152).
+
+New Plugins
+-----------
+
+Lookup
+~~~~~~
+
+- vault_read - Perform a read operation against HashiCorp Vault
+
+New Modules
+-----------
+
+- vault_read - Perform a read operation against HashiCorp Vault
+
+v1.3.2
+======
+
+Release Summary
+---------------
+
+This release adds requirements detection support for Ansible Execution Environments. It also updates and adds new guides in our `collection docsite <https://docs.ansible.com/ansible/devel/collections/community/hashi_vault>`_.
+This release also announces the dropping of Python 3.5 support in version ``2.0.0`` of the collection, alongside the previous announcement dropping Python 2.x in ``2.0.0``.
+
+Minor Changes
+-------------
+
+- hashi_vault collection - add ``execution-environment.yml`` and a python requirements file to better support ``ansible-builder`` (https://github.com/ansible-collections/community.hashi_vault/pull/105).
+
+Deprecated Features
+-------------------
+
+- hashi_vault collection - support for Python 3.5 will be dropped in version ``2.0.0`` of ``community.hashi_vault`` (https://github.com/ansible-collections/community.hashi_vault/issues/81).
+
+v1.3.1
+======
+
+Release Summary
+---------------
+
+This release fixes an error in the documentation. No functionality is changed, so it's not necessary to upgrade from ``1.3.0``.
+
+v1.3.0
+======
+
+Release Summary
+---------------
+
+This release adds two connection-based options for controlling timeouts and retrying failed Vault requests.
+
+Minor Changes
+-------------
+
+- hashi_vault lookup - add ``retries`` and ``retry_action`` to enable built-in retry on failure (https://github.com/ansible-collections/community.hashi_vault/pull/71).
+- hashi_vault lookup - add ``timeout`` option to control connection timeouts (https://github.com/ansible-collections/community.hashi_vault/pull/100).
+
+v1.2.0
+======
+
+Release Summary
+---------------
+
+This release brings several new ways of accessing options, like using Ansible vars, and adding new environment variables and INI config entries.
+A special ``none`` auth type is also added, for working with certain Vault Agent configurations.
+This release also announces the deprecation of Python 2 support in version ``2.0.0`` of the collection.
+
+Minor Changes
+-------------
+
+- hashi_vault lookup - add ``ANSIBLE_HASHI_VAULT_CA_CERT`` env var (with ``VAULT_CACERT`` low-precedence fallback) for ``ca_cert`` option (https://github.com/ansible-collections/community.hashi_vault/pull/97).
+- hashi_vault lookup - add ``ANSIBLE_HASHI_VAULT_PASSWORD`` env var and ``ansible_hashi_vault_password`` ansible var for ``password`` option (https://github.com/ansible-collections/community.hashi_vault/pull/96).
+- hashi_vault lookup - add ``ANSIBLE_HASHI_VAULT_USERNAME`` env var and ``ansible_hashi_vault_username`` ansible var for ``username`` option (https://github.com/ansible-collections/community.hashi_vault/pull/96).
+- hashi_vault lookup - add ``ansible_hashi_vault_auth_method`` Ansible vars entry to the ``auth_method`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+- hashi_vault lookup - add ``ansible_hashi_vault_ca_cert`` ansible var for ``ca_cert`` option (https://github.com/ansible-collections/community.hashi_vault/pull/97).
+- hashi_vault lookup - add ``ansible_hashi_vault_namespace`` Ansible vars entry to the ``namespace`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+- hashi_vault lookup - add ``ansible_hashi_vault_proxies`` Ansible vars entry to the ``proxies`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+- hashi_vault lookup - add ``ansible_hashi_vault_role_id`` Ansible vars entry to the ``role_id`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+- hashi_vault lookup - add ``ansible_hashi_vault_secret_id`` Ansible vars entry to the ``secret_id`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+- hashi_vault lookup - add ``ansible_hashi_vault_token_file`` Ansible vars entry to the ``token_file`` option (https://github.com/ansible-collections/community.hashi_vault/pull/95).
+- hashi_vault lookup - add ``ansible_hashi_vault_token_path`` Ansible vars entry to the ``token_path`` option (https://github.com/ansible-collections/community.hashi_vault/pull/95).
+- hashi_vault lookup - add ``ansible_hashi_vault_token_validate`` Ansible vars entry to the ``token_validate`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+- hashi_vault lookup - add ``ansible_hashi_vault_token`` Ansible vars entry to the ``token`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+- hashi_vault lookup - add ``ansible_hashi_vault_url`` and ``ansible_hashi_vault_addr`` Ansible vars entries to the ``url`` option (https://github.com/ansible-collections/community.hashi_vault/pull/86).
+- hashi_vault lookup - add ``ansible_hashi_vault_validate_certs`` Ansible vars entry to the ``validate_certs`` option (https://github.com/ansible-collections/community.hashi_vault/pull/95).
+- hashi_vault lookup - add ``ca_cert`` INI config file key for the ``ca_cert`` option (https://github.com/ansible-collections/community.hashi_vault/pull/97).
+- hashi_vault lookup - add ``none`` auth type which allows for passive auth via a Vault agent (https://github.com/ansible-collections/community.hashi_vault/pull/80).
+
+Deprecated Features
+-------------------
+
+- hashi_vault collection - support for Python 2 will be dropped in version ``2.0.0`` of ``community.hashi_vault`` (https://github.com/ansible-collections/community.hashi_vault/issues/81).
+
+v1.1.3
+======
+
+Release Summary
+---------------
+
+This release fixes a bug with ``userpass`` authentication and ``hvac`` versions 0.9.6 and higher.
+
+Bugfixes
+--------
+
+- hashi_vault - userpass authentication did not work with hvac 0.9.6 or higher (https://github.com/ansible-collections/community.hashi_vault/pull/68).
+
+v1.1.2
+======
+
+Release Summary
+---------------
+
+This release contains the same functionality as 1.1.1. The only change is to mark some code as internal to the collection. If you are already using 1.1.1 as an end user, you do not need to update.
+
+v1.1.1
+======
+
+Release Summary
+---------------
+
+This bugfix release restores the use of the ``VAULT_ADDR`` environment variable for setting the ``url`` option.
+See the PR linked from the changelog entry for details and workarounds if you cannot upgrade. + +Bugfixes +-------- + +- hashi_vault - restore use of ``VAULT_ADDR`` environment variable as a low preference env var (https://github.com/ansible-collections/community.hashi_vault/pull/61). + +v1.1.0 +====== + +Release Summary +--------------- + +This release contains a new ``proxies`` option for the ``hashi_vault`` lookup. + +Minor Changes +------------- + +- hashi_vault - add ``proxies`` option (https://github.com/ansible-collections/community.hashi_vault/pull/50). + +v1.0.0 +====== + +Release Summary +--------------- + +Our first major release contains a single breaking change that will affect only a small subset of users. No functionality is removed. See the details in the changelog to determine if you're affected and if so how to transition to remediate. + +Breaking Changes / Porting Guide +-------------------------------- + +- hashi_vault - the ``VAULT_ADDR`` environment variable is now checked last for the ``url`` parameter. For details on which use cases are impacted, see (https://github.com/ansible-collections/community.hashi_vault/issues/8). + +v0.2.0 +====== + +Release Summary +--------------- + +Several backwards-compatible bugfixes and enhancements in this release. +Some environment variables are deprecated and have standardized replacements. + +Minor Changes +------------- + +- Add optional ``aws_iam_server_id`` parameter as the value for ``X-Vault-AWS-IAM-Server-ID`` header (https://github.com/ansible-collections/community.hashi_vault/pull/27). +- hashi_vault - ``ANSIBLE_HASHI_VAULT_ADDR`` environment variable added for option ``url`` (https://github.com/ansible-collections/community.hashi_vault/issues/8). +- hashi_vault - ``ANSIBLE_HASHI_VAULT_AUTH_METHOD`` environment variable added for option ``auth_method`` (https://github.com/ansible-collections/community.hashi_vault/issues/17). +- hashi_vault - ``ANSIBLE_HASHI_VAULT_ROLE_ID`` environment variable added for option ``role_id`` (https://github.com/ansible-collections/community.hashi_vault/issues/20). +- hashi_vault - ``ANSIBLE_HASHI_VAULT_SECRET_ID`` environment variable added for option ``secret_id`` (https://github.com/ansible-collections/community.hashi_vault/issues/20). +- hashi_vault - ``ANSIBLE_HASHI_VAULT_TOKEN_FILE`` environment variable added for option ``token_file`` (https://github.com/ansible-collections/community.hashi_vault/issues/15). +- hashi_vault - ``ANSIBLE_HASHI_VAULT_TOKEN_PATH`` environment variable added for option ``token_path`` (https://github.com/ansible-collections/community.hashi_vault/issues/15). +- hashi_vault - ``namespace`` parameter can be specified in INI or via env vars ``ANSIBLE_HASHI_VAULT_NAMESPACE`` (new) and ``VAULT_NAMESPACE`` (lower preference) (https://github.com/ansible-collections/community.hashi_vault/issues/14). +- hashi_vault - ``token`` parameter can now be specified via ``ANSIBLE_HASHI_VAULT_TOKEN`` as well as via ``VAULT_TOKEN`` (the latter with lower preference) (https://github.com/ansible-collections/community.hashi_vault/issues/16). +- hashi_vault - add ``token_validate`` option to control token validation (https://github.com/ansible-collections/community.hashi_vault/pull/24). +- hashi_vault - uses new AppRole method in hvac 0.10.6 with fallback to deprecated method with warning (https://github.com/ansible-collections/community.hashi_vault/pull/33). 
+ +Deprecated Features +------------------- + +- hashi_vault - ``VAULT_ADDR`` environment variable for option ``url`` will have its precedence lowered in 1.0.0; use ``ANSIBLE_HASHI_VAULT_ADDR`` to intentionally override a config value (https://github.com/ansible-collections/community.hashi_vault/issues/8). +- hashi_vault - ``VAULT_AUTH_METHOD`` environment variable for option ``auth_method`` will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_AUTH_METHOD`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/17). +- hashi_vault - ``VAULT_ROLE_ID`` environment variable for option ``role_id`` will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_ROLE_ID`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/20). +- hashi_vault - ``VAULT_SECRET_ID`` environment variable for option ``secret_id`` will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_SECRET_ID`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/20). +- hashi_vault - ``VAULT_TOKEN_FILE`` environment variable for option ``token_file`` will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_TOKEN_FILE`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/15). +- hashi_vault - ``VAULT_TOKEN_PATH`` environment variable for option ``token_path`` will be removed in 2.0.0, use ``ANSIBLE_HASHI_VAULT_TOKEN_PATH`` instead (https://github.com/ansible-collections/community.hashi_vault/issues/15). + +Bugfixes +-------- + +- hashi_vault - ``mount_point`` parameter did not work with ``aws_iam_login`` auth method (https://github.com/ansible-collections/community.hashi_vault/issues/7) +- hashi_vault - fallback logic for handling deprecated style of auth in hvac was not implemented correctly (https://github.com/ansible-collections/community.hashi_vault/pull/33). +- hashi_vault - parameter ``mount_point`` does not work with JWT auth (https://github.com/ansible-collections/community.hashi_vault/issues/29). +- hashi_vault - tokens without ``lookup-self`` ability can't be used because of validation (https://github.com/ansible-collections/community.hashi_vault/issues/18). + +v0.1.0 +====== + +Release Summary +--------------- + +Our first release matches the ``hashi_vault`` lookup functionality provided by ``community.general`` version ``1.3.0``. + diff --git a/ansible_collections/community/hashi_vault/docs/docsite/rst/about_hashi_vault_lookup.rst b/ansible_collections/community/hashi_vault/docs/docsite/rst/about_hashi_vault_lookup.rst new file mode 100644 index 000000000..84bb0faec --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/docsite/rst/about_hashi_vault_lookup.rst @@ -0,0 +1,90 @@ +.. _ansible_collections.community.hashi_vault.docsite.about_hashi_vault_lookup: + +******************************** +About the ``hashi_vault`` lookup +******************************** + +This page explains the past, present, and future of the ``hashi_vault`` :ref:`lookup plugin <ansible_collections.community.hashi_vault.hashi_vault_lookup>`. + +The ``hashi_vault`` lookup is the oldest Vault-related content in Ansible. It was included in pre-collections Ansible (<2.10). As a result, it's the most used plugin for Vault, and the one most people are familiar with. + +At this time, we recommend using newer content in the collection, and we believe all use cases for ``hashi_vault`` have been covered by newer plugins. To understand the history, continue reading this document. 
For help with migration, :ref:`the hashi_vault migration guide <ansible_collections.community.hashi_vault.docsite.migration_hashi_vault_lookup>` has you covered. + +.. contents:: + :local: + :depth: 2 + +Synopsis +======== + +The short summary is: + +* The ``hashi_vault`` lookup does several jobs and uses some patterns that we would like to change, but are well-entrenched. +* The ``community.hashi_vault`` collection is developing and releasing new plugins and modules that are more tightly-scoped and will offer individual coverage for many use cases that the ``hashi_vault`` lookup has been used for. +* At this time, there are no plans to deprecate the ``hashi_vault`` lookup, but it is also unlikely that it will receive new features specific to that lookup (improvements in shared code like new auth methods are included automatically). +* As more plugins are released in the collection, we will be adding specific migration guidance to this page with examples. + +The long story +============== + +``hashi_vault`` lookup considerations +------------------------------------- + +Due to the history of the ``hashi_vault`` lookup plugin, it does many jobs. It is versatile, but sometimes unintuitive. + +The ``hashi_vault`` lookup plugin performs three main tasks: + +* authentication, taking parameters for various login types, performing a login, and acquiring a token with which it can make additional calls to Vault. +* a generic read operation, which allows it to read any kind of Vault path, without having to be written with that type of path in mind. +* transforming responses that look like ``kv2`` responses into simpler responses that resemble those from ``kv1``. + +Reading secrets is the most common use case, with the ``kv`` (key/value) store built into Vault as by far the most common secret store. Most implementations use v2 of the ``kv`` store. To make reading v2 ``kv`` secrets easy, the lookup plugin assumes that you're probably trying to read a ``kv`` secret, and tries to infer if the response is from ``kv2``, because the responses from version 2 include metadata and have the secret value additionally wrapped in another structure. The lookup plugin seeks to make ``kv2`` responses look more like responses from version 1. + +Since the ``kv`` store has one or more key/value pairs in each secret, the lookup also supports a non-standard suffix in its path that can be used to access a value belonging to one specific key, via the ``:keyname`` syntax. While this is useful to provide a compact way to access a single secret value (admittedly a very common use case), it complicates the implementation and leads to bad habits. + +For example, it became common to see people use many lookup invocations with the same path, each with a different ``:keyname``, to access multiple values within a single secret, but this is quite wasteful, as it does a separate login and secret lookup, all to return the same value, and the key dereferencing is done client side. Further, dereferencing can be done directly in Jinja where it's more clear what's going on, using the ``.key`` or ``['key']`` syntax. + +One last idiosyncrasy of the plugin is its support for supplying all of its parameters in the term string. This looks compact, but it greatly complicates the processing of plugin options. At the time that this lookup was created, many other lookups allowed options to be supplied in the term string, but it has since been considered an anti-pattern, and has been deprecated/removed from core plugins. 
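+
+As a purely illustrative sketch (the secret path, keys, Vault address, and variable names here are placeholders, not values defined by this collection, and authentication options are omitted), the two patterns described above look roughly like this:
+
+.. code-block:: yaml+jinja
+
+   vars:
+     # Term string style: the path, a ':keyname' suffix, and connection options are all
+     # packed into the term, and each invocation performs its own login and its own read.
+     app_username: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/data/app:username url=https://vault.example.com:8200') }}"
+     app_password: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/data/app:password url=https://vault.example.com:8200') }}"
+
+     # Reading the secret once and dereferencing the keys in Jinja is clearer and avoids
+     # the repeated logins and reads.
+     app_secret: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/data/app', url='https://vault.example.com:8200') }}"
+     app_username_alt: "{{ app_secret.username }}"
+     app_password_alt: "{{ app_secret['password'] }}"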
+
+Another downside of this is that it prevents us from effectively re-using the authentication token in cases when multiple term strings are supplied, directly or via ``with_community.hashi_vault.hashi_vault``, and as a result this type of usage performs a new login for each term. In newer lookups, we can take advantage of a single login to perform multiple operations.
+
+All of these considerations make sense in context, but it somewhat muddles the purpose of the lookup:
+
+* If a response from a completely different endpoint ended up looking like a ``kv2`` response, it would return an unexpected result.
+* If you try to give the path of a ``kv2`` secret directly, it will not work unless you insert a ``/data/`` component into the path, in order to match the *API path* rather than the path people are usually familiar with.
+* If you want the metadata returned along with a ``kv2`` response, you cannot get it.
+* Other features of ``kv2`` like secret versioning cannot directly be used, unless you modify the URL, which is error prone and unintuitive.
+* Getting access to the token created by the internal login, in order to re-use it, is not possible.
+
+How we are addressing the considerations
+----------------------------------------
+
+The built-in authentication support will be kept, and in fact it has been moved to shared utilities within the collection, so that all plugins and modules can share the functionality and work consistently. That makes it easier to test new and existing auth methods, easier to add new ones (which automatically become part of all existing content), and easier to add new content, because authentication does not need to be reimplemented.
+
+In addition, it is now possible to perform a login directly and return the token, for general re-use, via the ``community.hashi_vault.vault_login`` :ref:`module <ansible_collections.community.hashi_vault.vault_login_module>` and :ref:`lookup plugin <ansible_collections.community.hashi_vault.vault_login_lookup>`.
+
+Generic read (not ``kv`` specific) is still important functionality, so we have the ``community.hashi_vault.vault_read`` :ref:`module <ansible_collections.community.hashi_vault.vault_read_module>` and :ref:`lookup plugin <ansible_collections.community.hashi_vault.vault_read_lookup>` to provide that without trying to infer whether the response is from a specific backend.
+
+Since reading from the ``kv`` store is by far the most common use case, we have dedicated content for that:
+
+* ``community.hashi_vault.vault_kv1_get`` :ref:`module <ansible_collections.community.hashi_vault.vault_kv1_get_module>`
+* ``community.hashi_vault.vault_kv2_get`` :ref:`module <ansible_collections.community.hashi_vault.vault_kv2_get_module>`
+* ``community.hashi_vault.vault_kv1_get`` :ref:`lookup <ansible_collections.community.hashi_vault.vault_kv1_get_lookup>`
+* ``community.hashi_vault.vault_kv2_get`` :ref:`lookup <ansible_collections.community.hashi_vault.vault_kv2_get_lookup>`
+
+The dictionary dereferencing via ``:keyname`` syntax *will not be supported* in other content. That will be achieved in Jinja via:
+
+* dot syntax ``.keyname``
+* lookup syntax ``['keyname']``
+* specialized filters in some circumstances, such as the ``vault_login_token`` :ref:`filter <ansible_collections.community.hashi_vault.docsite.filter_guide.vault_login_token>`.
+
+Parameters via term string *will not be supported* in other lookups.
Its use is discouraged by core developers, and steps have already been taken in core to remove the functionality where it still exists; however, it will remain in the ``hashi_vault`` plugin for backwards compatibility and because it is likely to still be in use in a lot of places. + +The future of the ``hashi_vault`` lookup +---------------------------------------- + +There are no plans currently to deprecate or remove the ``hashi_vault`` plugin. It is likely that it will stay indefinitely, for backwards compatibility and because so much functionality has been moved to shared code that very little maintenance is required to keep it. This decision may be revisited if circumstances change. + +That being said, we will encourage the use of newer content that has functionality with a tighter scope and is expected to receive updates and enhancements as appropriate. + +New features and functionality are unlikely to be added or accepted in the ``hashi_vault`` lookup, except for the ones that come for "free", like new auth methods (these require no code changes to the plugin itself). diff --git a/ansible_collections/community/hashi_vault/docs/docsite/rst/contributor_guide.rst b/ansible_collections/community/hashi_vault/docs/docsite/rst/contributor_guide.rst new file mode 100644 index 000000000..ed5259fcd --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/docsite/rst/contributor_guide.rst @@ -0,0 +1,299 @@ +.. _ansible_collections.community.hashi_vault.docsite.contributor_guide: + +***************** +Contributor guide +***************** + +This guide aims to help anyone who wishes to contribute to the ``community.hashi_vault`` collection. + +.. note:: + + This guide can be improved with your help! Open a `GitHub issue in the repository <https://github.com/ansible-collections/community.hashi_vault/issues>`_ or contribute directly by following the instructions below. + + +.. contents:: + :local: + :depth: 3 + + +Quick start +=========== + +#. Log into your GitHub account. +#. Fork the `ansible-collections/community.hashi_vault repository <https://github.com/ansible-collections/community.hashi_vault>`_ by clicking the **Fork** button in the upper right corner. This will create a fork in your own account. +#. Clone the repository locally, following :ref:`the example instructions here <hacking_collections>` (but replace ``general`` with ``hashi_vault``). **Pay special attention to the local path structure** of the cloned repository as described in those instructions (for example ``ansible_collections/community/hashi_vault``). +#. As mentioned on that page, commit your changes to a branch, push them to your fork, and create a pull request (GitHub will automatically prompt you to do so when you look at your repository). +#. :ref:`See the guidance on Changelogs <community_changelogs>` and include a :ref:`changelog fragment <changelogs_how_to>` if appropriate. + +Contributing documentation +========================== + +Additions to the collection documentation are very welcome! We have three primary types of documentation, each with their own syntax and rules. + +README and other markdown files +------------------------------- + +Markdown files (those with the extension ``.md``) can be found in several directories within the repository. These files are primarily aimed at developers and those browsing the repository, to explain or give context to the other files nearby. + +The main exception to the above is the ``README.md`` in the repository root.
This file is more important because it provides introductory information and links for anyone browsing the repository, both on GitHub and on the collection's `Ansible Galaxy page <https://galaxy.ansible.com/community/hashi_vault>`_. + +Markdown files can be previewed natively in GitHub, so they are easy to validate by reviewers, and there are no specific tests that need to run against them. + +Your IDE or code editor may also be able to preview these files. For example `Visual Studio Code has built-in markdown preview <https://code.visualstudio.com/docs/languages/markdown#_markdown-preview>`_. + +Module and plugin documentation +------------------------------- + +This type of documentation gets generated from structured YAML, inside of a Python string. It is included in the same code that it's documenting, or in a separate Python file, such as a doc fragment. Please see the :ref:`module format and documentation guidance <developing_modules_documenting>` for more information. + +This type of documentation is highly structured and tested with ``ansible-test sanity``. Full instructions are available on the :ref:`testing module documentation <testing_module_documentation>` page. + +Additionally, the docsite build on pull requests (or built locally) will include module and plugin documentation as well. See the next section for details. + +Collection docsite +------------------ + +The collection docsite is what you are reading now. It is written in reStructuredText (RST) format and published on the :ref:`ansible_documentation` site. This is where we have long-form documentation that doesn't fit into the other two categories. + +If you are considering adding an entirely new document here it may be best to open a GitHub issue first to discuss the idea and how best to organize it. + +Refer to the :ref:`Ansible style guide <style_guide>` for all submissions to the collection docsite. + +RST files for the docsite are in the ``docs/docsite/rst/`` directory. Some submissions may also require edits to ``docs/docsite/extra-docs.yml``. + +When a pull request is submitted which changes the collection's documentation, a new docsite will be generated and published to a temporary site, and a bot will post a comment on the PR with a link. This will let you see the rendered docs to help with spotting formatting errors. + +It's also possible to build the docs locally, by installing some extra Python requirements and running the build script: + +.. code-block:: shell-session + + $ pushd docs/preview + $ pip install -r requirements.txt + $ ./build.sh + +You can then find the generated HTML in ``docs/preview/build/html`` and can open them locally in your browser. + +Running tests locally +===================== + +If you're making anything more than very small or one-time changes, run the tests locally to avoid having to push a commit for each thing, and waiting for the CI to run tests. + +First, :ref:`review the guidance on testing collections <testing_collections>`, as it applies to this collection as well. + +Integration Tests +----------------- + +Unlike other collections, we require an `integration_config.yml <https://docs.ansible.com/ansible/latest/dev_guide/testing_integration.html#integration-config-yml>`_ file for properly running integration tests, as the tests require external dependencies (like a Vault server) and they need to know where to find those dependencies. 
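+
+The exact variables needed depend on which localenv you use (they are described in the localenv developer guide), but as a rough sketch, the file contains values along these lines (all values here are placeholders):
+
+.. code-block:: yaml
+
+   vault_test_server_http: http://myvault:8200
+   vault_test_server_https: https://myvault:8300
+   vault_dev_root_token_id: 3ee9a1f7-f115-4f7c-90a3-d3c73361bcb5
+   vault_proxy_server: http://proxy:8080
+   vault_mmock_server_http: http://mmock:8900
+   vault_cert_content: |
+     -----BEGIN CERTIFICATE-----
+     <snip>
+     -----END CERTIFICATE-----
+
+The recommended localenv described below can generate a suitable ``integration_config.yml`` for you, so you normally do not need to write it by hand.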
+ +If you have contributed to this collection or to the ``hashi_vault`` lookup plugin in the past, you might remember that the integration tests used to download, extract, and run a Vault server during the course of the tests, by default. This *legacy mode* is **no longer available**. + + +.. _ansible_collections.community.hashi_vault.docsite.contributor_guide.localenv_docker: + +Docker Compose localenv +^^^^^^^^^^^^^^^^^^^^^^^ + +The recommended way to run the tests has Vault and other dependencies running in their own containers, set up via docker-compose, and the integration tests run in their own container separately. + +We have a pre-defined "localenv" setup role for this purpose. + +Usage +""""" + +For ease of typing / length of commands, we'll enter the role directory first: + +.. code-block:: shell-session + + $ pushd tests/integration/targets/setup_localenv_docker + +This localenv has both Ansible collection and Python requirements, so let's get those out of the way: + +.. code-block:: shell-session + + $ pip install -r files/requirements/requirements.txt -c files/requirements/constraints.txt + $ ansible-galaxy collection install -r files/requirements/requirements.yml + +To set up your docker-compose environment with all the defaults: + +.. code-block:: shell-session + + $ ./setup.sh + +The setup script does the following: + +#. Template a ``docker-compose.yml`` for the project. +#. Generate a private key and self-signed certificate for Vault. +#. Template a Vault config file. +#. Bring down the existing compose project. +#. Bring up the compose project as defined by the vars (specified or defaults). +#. Template an ``integration_config.yml`` file that has all the right settings for integration tests to connect. +#. Copy the integration config to the correct location *if there isn't already one there* (it won't overwrite, in case you had customized changes). + +With your containers running, you can now run the tests in docker (after returning back to the collection root): + +.. code-block:: shell-session + + $ popd + $ ansible-test integration --docker default --docker-network hashi_vault_default -v + +The ``--docker-network`` part is important, because it ensures that the Ansible test container is in the same network as the dependency containers, that way the test container can reach them by their container names. The network name, ``hashi_vault_default`` comes from the default docker-compose project name used by this role (``hashi_vault``). See the :ref:`customization section <ansible_collections.community.hashi_vault.docsite.contributor_guide.localenv_docker_customization>` for more information. + +Running ``setup.sh`` again can be used to re-deploy the containers, or if you prefer you can use the generated ``files/.output/<project_name>/docker-compose.yml`` directly with local tools. + +If running again, remember to manually copy the contents of newly generated ``files/.output/integration_config.yml`` to the integration root, or delete the file in the root before re-running setup so that it copies the file automatically. + +.. _ansible_collections.community.hashi_vault.docsite.contributor_guide.localenv_docker_customization: + +Customization +""""""""""""" + +``setup.sh`` passes any additional params you send it to the ``ansible-playbook`` command it calls, so you can customize variables with the standard ``--extra-vars`` (or ``-e``) option. 
There are many advanced scenarios possible, but a few things you might want to override: + +* ``vault_version`` -- can target any version of Vault for which a docker container exists (this is the container's tag), defaults to ``latest`` +* ``docker_compose`` (defaults to ``clean`` but could be set to ``up``, ``down``, or ``none``) + * ``up`` -- similar to running ``docker-compose up`` (no op if the project is running as it should) + * ``down`` -- similar to ``docker-compose down`` (destroys the project) + * ``clean`` -- (default) similar to ``docker-compose down`` followed by ``docker-compose up`` + * ``none`` -- does the other tasks, including templating, but does not bring the project up or down. With this option, the ``community.docker`` collection is not required. +* ``vault_crypto_force`` -- by default this is ``false`` so if the cert and key exist they won't be regenerated. Setting to ``true`` will overwrite them. +* ``vault_port_http``, ``vault_port_https``, ``proxy_port`` -- all of the ports are exposed to the host, so if you already have any of the default ports in use on your host, you may need to override these. +* ``vault_container_name``, ``proxy_container_name`` -- these are the names for their respective containers, which will also be the DNS names used within the container network. In case you have the default names in use you may need to override these. +* ``docker_compose_project_name`` -- unlikely to need to be changed, but it affects the name of the docker network which will be needed for your ``ansible-test`` invocation, so it's worth mentioning. For example, if you set this to ``ansible_hashi_vault`` then the docker network name will be ``ansible_hashi_vault_default``. + +.. _ansible_collections.community.hashi_vault.docsite.contributor_guide.contributing_auth_methods: + +Contributing auth methods +========================= + +In this collection, auth methods are shared among all plugins and modules rather than being re-implemented in each one. This saves the effort of re-inventing the wheel, avoids the test bloat that would come from testing the same functionality in every piece of content, and provides a consistent experience. + +File location & scope +--------------------- + +Auth methods are implemented as classes in ``module_utils``, in a file named ``plugins/module_utils/_auth_method_<method_name>.py``. The leading underscore indicates that the module util is private to the collection and that it is not intended to be used outside the collection; this lets us make changes as needed without needing to release a new major version, and clearly indicates to would-be downstream users that they should not rely on these utils from content outside the collection. + +In addition, all auth method module utils within the collection must contain a comment explaining this, such as: + +.. code-block:: python + + # FOR INTERNAL COLLECTION USE ONLY + # The interfaces in this file are meant for use within the community.hashi_vault collection + # and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release. + # See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 + # Please open an issue if you have questions about this. + +It is best to look at `existing auth methods <https://github.com/ansible-collections/community.hashi_vault/tree/main/plugins/module_utils>`_ to get a feel for how they are implemented.
+ +Class anatomy +------------- + +Each auth method class should be named ``HashiVaultAuthMethod<MethodName>`` and inherit from ``HashiVaultAuthMethodBase``. + +The base class provides some common functionality, like standardizing a way to emit warnings and providing a common function for validating required options. + +An auth method must run the base class's ``__init__`` function. + +It must implement two methods: + +* ``validate()`` -- this method does everything it can to ensure that the requirements are met for performing authentication with this particular auth method. This may include checking for required options, validating the values of those options, pulling in additional information and context from the environment, preparing that information for use by ``authenticate()``, etc. Generally speaking, it should not contact Vault, and should minimize reliance on external sources and services, but that is a guideline and the details will depend on the specifics of the auth method in question. ``validate()`` raises an exception if validation fails. If it succeeds, nothing is returned. +* ``authenticate(client, use_token=False)`` -- this method performs the actual authentication, and it returns the API result of the authentication (which will include the token, lease information, etc.). The HVAC client object is passed in, as well as an optional parameter ``use_token`` which specifies whether the client should have its token field set to the result of authentication (typically this is desired). + +The auth method class should also contain two fields: + +* ``NAME`` -- the name of the auth method. +* ``OPTIONS`` -- a list containing the name of every option that may be used by the auth method, including optional options; this list should not include the ``auth_method`` option. + +Raising exceptions and warnings +------------------------------- + +Because auth methods are shared among both Ansible modules and Ansible plugins, any exceptions raised must be applicable to both. Standard Python exceptions like ``KeyError`` can be raised if they are appropriate. + +In situations where you would normally raise ``AnsibleError`` (in plugins), or call ``module.fail_json()`` (in modules), you may raise ``HashiVaultValueError`` with your error message. Plugins and modules in this collection should expect this type and act accordingly. + +Similarly for warnings, because plugins and modules implement warnings differently, module util code that needs to warn takes a warning callback, and this is true for auth methods as well. + +The base class provides a ``warn()`` method that handles calling the callback specified at class init, so a simple ``self.warn()`` can be used in auth method code. + +Accessing options +----------------- + +Because auth methods are shared among both Ansible modules and Ansible plugins, which do not access options in the same way, this collection implements a class called ``HashiVaultOptionAdapter``. This class provides a standard interface for accessing option values in code that must work in both plugins and modules. + +It implements the following methods: + +* ``get_option(key)`` -- gets the option with the specified name. Raises ``KeyError`` if the option is not present. +* ``get_option_default(key, default=None)`` -- gets the option with the specified name. If it's not present, returns the value of ``default``. +* ``set_option(key, value)`` -- sets the value of the specified option ``key`` to ``value``.
+* ``set_option_default(key, default=None)`` -- returns the value of the option ``key``. If the key is not present, sets its value to ``default`` and returns that value. +* ``has_option(key)`` -- returns ``True`` if the specified option *is present* (``None`` value counts as present). +* ``set_options(**kwargs)`` -- sets options to the key/value pairs specified in ``kwargs``. +* ``get_options(*args)`` -- returns a dict of the option names specified in ``args``. +* ``get_filtered_options(filter, *args)`` -- returns a dict of the option names specified in ``args``, if the callable ``filter`` (which has ``key`` and ``value`` passed into it) returns ``True`` for the given key/value pair. +* ``get_filled_options(*args)`` -- returns a dict of the option names specified in ``*args`` that are not ``None``. + +The authenticator +----------------- + +The ``HashiVaultAuthenticator`` class is how most of the content in the collection will handle authentication, rather than having to directly reference each individual auth method. As a result, ``_authenticator.py`` needs to be modified for every new auth method, because it imports and directly references each class. See `the implementation of this class <https://github.com/ansible-collections/community.hashi_vault/blob/main/plugins/module_utils/_authenticator.py>`_ to find the places that need to be modified. + +Auth method options and documentation +------------------------------------- + +Because auth methods are shared among collection content, their options are documented in doc_fragment plugins. Because many options end up being shared among many auth methods (for example ``role_id``, ``username``, ``password``), we do not have a separate doc fragment for each auth method, as this would end up with duplicated option documentation. + +Instead, all of the options for auth methods are in ``plugins/doc_fragments/auth.py``. + +This contains the standard ``DOCUMENTATION`` field, as well as a ``PLUGINS`` field. The reason for this split is that there are certain parts of the documentation that are only applicable to plugins; namely the ``env``, ``ini``, and ``vars`` entries. + +``DOCUMENTATION`` should contain all fields common to both, like ``description``, ``type``, ``version_added``, ``required``, etc., while anything plugin-specific goes in ``PLUGINS``. + +Since plugins and modules will reference the doc fragments, it's not usually required to modify the docstrings in the content directly; if it seems necessary, please raise an issue to discuss. + +Wherever possible, we should provide ``env``, ``ini``, and ``vars`` alternatives for specifying options, to give maximum flexibility for plugins. Occasionally, these won't make sense, like providing ``token`` (a sensitive value) in ``ini``. + +When deciding to implement new options for an auth method, consider whether existing options can or should be reused. If a new option is needed, consider scoping its name to the auth method, in order to differentiate it from current or future option names that could be confusing in another context. + +For example, ``cert_auth_public_key`` and ``cert_auth_private_key`` were named that way to prevent them being confused with other certificate options that relate to the Vault connection, or other contexts where specific plugins or modules might need key pairs. + +Testing auth methods +-------------------- + +Because auth methods are shared across the collection, we want them to be very well tested.
Auth methods have both unit and integration tests, and the combination of those should give us high confidence that the methods work as intended. + +Unit tests +^^^^^^^^^^ + +Unit tests allow us to check some of the functionality that is difficult to test effectively in integration tests, like checking that every possible combination of options behaves as it should, or simulating conditions that we can't easily reproduce. The coverage of various scenarios should be extensive; which scenarios are needed, and how complex they are, will depend on the intricacies of the auth method itself. Looking at existing examples is highly recommended. + +A pytest fixture is provided to load fixture files that contain sample Vault API responses. Using these allows for mocking of the HVAC authentication calls within the unit tests. + +Integration tests +^^^^^^^^^^^^^^^^^ + +Our integration tests provide a running Vault server, and with that we can set up any auth methods we want (in theory). In practice, auth methods often require external services to integrate with. When possible, we should consider setting up such external services so that we can create a meaningful, real life integration and test it. + +Often, however, this is not possible, or is difficult. We must consider that tests are not only run in CI, but should be able to be run locally as well. + +Mocking integrations +"""""""""""""""""""" + +We have implemented `MMock (Monster Mock) <https://github.com/jmartin82/mmock>`_ in our integration test setup to help with this. This server is set up to proxy its requests to the test Vault server, but you can write configurations that allow it to return different data for specific requests. By carefully constructing these responses, we can simulate the Vault API's response to login requests for specific auth methods, and also simulate its failures. With that, we can then run integration tests that hopefully provide us some assurance that we are implementing it correctly. + +Testing plugin and module usage +""""""""""""""""""""""""""""""" + +Auth methods are usable from modules and plugins, so integration tests for an auth method must test it via both plugins and modules. + +We provide custom modules and plugins specifically for testing auth methods within the integration tests. These are simplified implementations but they use the common code that should be used by all content, and they can be set to return some useful information about the login process. See the existing tests for details. + +Test coverage +^^^^^^^^^^^^^ + +In CI, we use CodeCov to track coverage. We also set some specific "tags" in coverage, and one of those is to tag individual auth methods as targets for integration tests. This happens automatically in CI; however, new auth methods need an entry in ``codecov.yml`` that maps the coverage flag to the file where the auth method is implemented. For example: + +.. code:: yaml + + flags: + target_auth_aws_iam: + paths: + - plugins/module_utils/_auth_method_aws_iam.py diff --git a/ansible_collections/community/hashi_vault/docs/docsite/rst/filter_guide.rst b/ansible_collections/community/hashi_vault/docs/docsite/rst/filter_guide.rst new file mode 100644 index 000000000..8cee2b987 --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/docsite/rst/filter_guide.rst @@ -0,0 +1,139 @@ +.. _ansible_collections.community.hashi_vault.docsite.filter_guide: + +Filter guide +============ + +..
note:: + + Filter Plugins are now included with other :ref:`plugin documentation <plugins_in_community.hashi_vault>`. + + +.. contents:: Filters + +.. _ansible_collections.community.hashi_vault.docsite.filter_guide.vault_login_token: + +``vault_login_token`` filter +---------------------------- + +.. versionadded:: 2.2.0 + +The ``vault_login_token`` filter extracts the token value from the structure returned by a Vault token creation operation, such as those returned by the ``community.hashi_vault.vault_login`` :ref:`module <ansible_collections.community.hashi_vault.vault_login_module>` or :ref:`lookup plugin <ansible_collections.community.hashi_vault.vault_login_lookup>`, or the ``community.hashi_vault.vault_token_create`` :ref:`module <ansible_collections.community.hashi_vault.vault_token_create_module>` or :ref:`lookup plugin <ansible_collections.community.hashi_vault.vault_token_create_lookup>`. + +The filter takes an optional parameter ``optional_field`` which defaults to ``login``. If this field exists in the input dictionary, then the value of that field is taken to be the login response, rather than the input dictionary itself. + +The purpose of this is primarily to deal with the difference between the output of lookup plugins (which return the login response directly) and modules, which return the login response in a ``login`` field in their return value. + +Here is a sample login response: + +.. code-block:: json + + { + "auth": { + "accessor": "mQewzgKRx5Yui1h1eMemJlMu", + "client_token": "s.drgLxu6ZtttSVn5Zkoy0huMR", + "entity_id": "8a74ffd3-f71b-8ebe-7942-610428051ea9", + "lease_duration": 3600, + "metadata": { + "username": "testuser" + }, + "orphan": true, + "policies": [ + "alt-policy", + "default", + "userpass-policy" + ], + "renewable": true, + "token_policies": [ + "alt-policy", + "default", + "userpass-policy" + ], + "token_type": "service" + }, + "data": null, + "lease_duration": 0, + "lease_id": "", + "renewable": false, + "request_id": "511e8fba-83f0-4b7e-95ea-770aa19c1957", + "warnings": null, + "wrap_info": null + } + +The token that we want to extract is in ``auth.client_token``. + +Here's an example usage with the ``vault_login`` module and lookup. + +.. code-block:: yaml+jinja + + - name: Set defaults + vars: + ansible_hashi_vault_url: https://vault:9801/ + ansible_hashi_vault_auth_method: userpass + ansible_hashi_vault_username: user + ansible_hashi_vault_password: "{{ lookup('env', 'MY_SECRET_PASSWORD') }}" + module_defaults: + community.hashi_vault.vault_login: + url: '{{ ansible_hashi_vault_url }}' + auth_method: '{{ ansible_hashi_vault_auth_method }}' + username: '{{ ansible_hashi_vault_username }}' + password: '{{ ansible_hashi_vault_password }}' + block: + - name: Perform a login with a lookup and display the token + vars: + login_response: "{{ lookup('community.hashi_vault.vault_login') }}" + debug: + msg: "The token is {{ login_response | community.hashi_vault.vault_login_token }}" + + - name: Perform a login with a module + community.hashi_vault.vault_login: + register: login_response + + - name: Display the token + debug: + msg: "The token is {{ login_response | community.hashi_vault.vault_login_token }}" + +Which produces: + +..
code-block:: ansible-output + + TASK [Perform a login with a lookup and display the token] ******************************** + ok: [localhost] => { + "msg": "s.drgLxu6ZtttSVn5Zkoy0huMR" + } + + TASK [Perform a login with a module] ***************************************************** + ok: [localhost] => {"changed": true, "login": {"auth": { "accessor": "mQewzgKRx5Yui1h1eMemJlMu", + "client_token": "s.drgLxu6ZtttSVn5Zkoy0huMR", "entity_id": "8a74ffd3-f71b-8ebe-7942-610428051ea9", + "lease_duration": 3600, "metadata": {"username": "testuser"}, "orphan": true, "policies": + ["alt-policy", "default", "userpass-policy"], "renewable": true, "token_policies": ["alt-policy", + "default", "userpass-policy"], "token_type": "service"}, "data": null, "lease_duration": 0, + "lease_id": "", "renewable": false, "request_id": "511e8fba-83f0-4b7e-95ea-770aa19c1957", + "warnings": null, "wrap_info": null}} + } + + TASK [Display the token] ***************************************************************** + ok: [localhost] => { + "msg": "s.drgLxu6ZtttSVn5Zkoy0huMR" + } + +This filter is the equivalent of reading into the dictionary directly, but it has the advantages of providing semantic meaning and automatically working against the differing output of modules and lookups. + +.. code-block:: yaml+jinja + + --- + lookup_token: "{{ lookup_login_response['auth']['client_token'] }}" + module_token: "{{ module_login_response['login']['auth']['client_token'] }}" + +The ``optional_field`` can be changed in case you've put the raw login response in some other structure, but you could also dereference that directly instead. + +.. code-block:: yaml+jinja + + --- + my_data: + something: somedata + vault_login: "{{ lookup_login_response }}" + + token_from_param: "{{ my_data | community.hashi_vault.vault_login_token(optional_field='vault_login') }}" + token_from_deref: "{{ my_data['vault_login'] | community.hashi_vault.vault_login_token }}" + # if the optional field doesn't exist, the dictionary itself is still checked + unused_optional: "{{ my_data['vault_login'] | community.hashi_vault.vault_login_token(optional_field='missing') }}" diff --git a/ansible_collections/community/hashi_vault/docs/docsite/rst/localenv_developer_guide.rst b/ansible_collections/community/hashi_vault/docs/docsite/rst/localenv_developer_guide.rst new file mode 100644 index 000000000..0b111d564 --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/docsite/rst/localenv_developer_guide.rst @@ -0,0 +1,93 @@ +.. _ansible_collections.community.hashi_vault.docsite.localenv_developer_guide: + +************************ +localenv developer guide +************************ + +A "localenv" role in this collection sets up the external dependencies required to run the integration tests. The idea is to provide a pre-packaged way for a contributor to set up their local environment in a consistent, repeatable way. + +.. note:: + + This guide is a work-in-progress and is **very** light on details. For the time being, it's best to open an issue in the repository to discuss it if you're thinking of a new localenv. Looking at ``setup_localenv_docker`` should also be helpful as it's the most complete one to date. + + +.. contents:: + :local: + :depth: 2 + + +Required external dependencies +============================== + +HashiCorp Vault +--------------- + +A Vault server is required for the integration tests. 
Using `Vault Dev Server Mode <https://www.vaultproject.io/docs/concepts/dev-server>`_ is recommended as it's the easiest and fastest way to get a server started. + +An unencrypted (plain HTTP) listener is *required* for our purposes as most of the tests will expect to connect that way. + +To run the tests that deal specifically with TLS/HTTPS access, you must start the Vault server with a TLS-enabled listener. The TLS address:port, and the CA cert (or the cert itself if self-signed) must be supplied. + +The **root token** of the Vault server is needed, as the integration tests make changes to Vault's configuration, and expect to have that token available to do so. It's possible to let Vault generate the token on startup and then retrieve it, but it may be easiest to pre-generate one and pass it into Vault, via the ``-dev-root-token-id`` option or ``VAULT_DEV_ROOT_TOKEN_ID`` environment variable (see `Dev Options <https://www.vaultproject.io/docs/commands/server#dev-options>`_). + +Relevant ``integration_config.yml`` variables +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. csv-table:: + :header: "var", "example", "description" + :widths: 15, 20, 65 + + "``vault_test_server_http``", "``http://myvault:8200``", "The full HTTP URL of your Vault test server." + "``vault_test_server_https``", "``https://myvault:8300``", "The full HTTPS URL of your Vault test server." + "``vault_dev_root_token_id``", "``3ee9a1f7-f115-4f7c-90a3-d3c73361bcb5``", "The root token used to authenticate to Vault." + "``vault_version``", "``1.7.3``", "The version of Vault in use (usually this is written by a localenv, so a value set manually is not used anywhere)." + "``vault_cert_content``", "``-----BEGIN CERTIFICATE-----<snip>``", "The public cert of the CA that signed the cert used for Vault's TLS listener (or the cert itself if self-signed)." + + +Proxy server +------------ + +A proxy server is used to test the proxy connectivity options. + +In theory any proxy supporting http/s targets could be used for this purpose, but `tinyproxy <https://github.com/tinyproxy/tinyproxy>`_ is recommended for being, well... tiny, as well as easy to configure and run, and available in package managers and containers. + +Relevant ``integration_config.yml`` variables +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. csv-table:: + :header: "var", "example", "description" + :widths: 15, 20, 65 + + "``vault_proxy_server``", "``http://proxy:8080``", "The full HTTP URL of your proxy server." + + +MMock server +------------ + +`MMock (short for Monster Mock) <https://github.com/jmartin82/mmock>`_ is an HTTP server designed for mocking HTTP responses. It can also transparently proxy through to a real server. We use it to proxy our test Vault server while intercepting certain API calls to Vault and returning mocked responses. + +This is useful for Vault integrations that are more difficult to set up in our CI environment. + +For example, we use this for testing the ``aws_iam`` auth method, since we don't have an AWS account we can use and configure and connect to from our GitHub CI. + +For these integration tests, all Vault interactions are directed to MMock rather than directly to Vault, and we pre-configure MMock to respond to the relevant calls in a way that models a real Vault server's success and failure responses. + +Relevant ``integration_config.yml`` variables +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +..
csv-table:: + :header: "var", "example", "description" + :widths: 15, 20, 65 + + "``vault_mmock_server_http``", "``http://mmock:8900``", "The full HTTP URL of the MMock server." + + +localenv role conventions +========================= + +* Use ``files/.output`` to hold generated artifacts. +* Anything generated should be in a ``.gitignore``; conversely anything not in a ``.gitignore`` should not be overwritten or modified by this process. That is, there should be no changes to git status that arise from this. +* Consider providing a ``setup.sh`` to avoid having to manually run ``ansible-`` commands. It should ideally operate correctly regardless of the current working directory. +* Generate a usable ``integration_config.yml`` that allows for using the result of the localenv. Generate it within the role output, not outside the role. Copy it to the right location, but do not overwrite an existing one. +* If the role has external dependencies, try to codify those in file(s) that can be used by the right tool, like ``requirements.yml`` for ``ansible-galaxy``, etc. +* localenv roles are meant to run **outside** of the ``ansible-test`` environment, but they can make (re)use of other roles. diff --git a/ansible_collections/community/hashi_vault/docs/docsite/rst/lookup_guide.rst b/ansible_collections/community/hashi_vault/docs/docsite/rst/lookup_guide.rst new file mode 100644 index 000000000..da73854b4 --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/docsite/rst/lookup_guide.rst @@ -0,0 +1,99 @@ +.. _ansible_collections.community.hashi_vault.docsite.lookup_guide: + +************ +Lookup guide +************ + +This guide is not a comprehensive listing of included lookup plugins and how to use them; rather, it is intended to explain the role of the lookup plugins in ``community.hashi_vault`` and how they are used, especially when compared to modules of the same name. + +For information about the ``hashi_vault`` lookup specifically, see :ref:`this page that covers it in detail <ansible_collections.community.hashi_vault.docsite.about_hashi_vault_lookup>`. + +.. contents:: + :local: + :depth: 2 + + +Lookups and writes +================== + +Most Ansible lookups perform read-only, non-destructive operations. They are run in templating, they generally *return* values, and they **do not run differently in check mode** (that is they do the same thing they would in normal mode, even if that means changing something). However, some lookups do change state, sometimes by performing write operations. For example, the ``password`` :ref:`lookup <ansible_collections.ansible.builtin.password_lookup>` writes a generated password to a file, to act as a sort of cache, and the ``pipe`` :ref:`lookup <ansible_collections.ansible.builtin.pipe_lookup>` runs an arbitrary shell command so it could easily write or change state. + +Writes in Vault +--------------- + +Operations that perform writes in Vault are not limited to the obvious ones such as writing a secret value, creating a policy, or enabling a new auth method. + +Any operation that creates a token, for example any login operation, is also a write; tokens use storage in Vault and having too many active tokens is a common cause of performance problems. + +Additionally, some values in Vault can only be "read" at the moment of their creation, and so the only way to retrieve such a value is to get it as a response from the "write" that created it. A common example is AppRole secret IDs.
+ +The way this relates to Ansible and this collection is that we may have lookup plugins that either unintuitively perform writes (like ``vault_login``), or appear inappropriate to exist as lookups in the first place, like the planned ``vault_write`` lookup. + +The reason for this is that we often consider these operations to be logical "read" operations, like performing a login, and want to use their results in other expressions. + +Something like ``vault_write`` does not always fit that description, because you could use it in a way that is clearly an explicit write, for example you could create a new policy with the lookup. But there are times it may be appropriate to use it in lookup semantics, like when "retrieving" (really creating) a new secret ID for an AppRole. + +When considering built-in support for auth methods, any auth method other than ``token`` or ``none`` makes every lookup, even ``vault_read``, into something that's changing state and performing a write within Vault. This actually applies to many modules too, even when using check mode. + +How to reason about when to use lookups +--------------------------------------- + +Because there is potential for writes in any lookup, it is very important to carefully consider when you are using a lookup vs. a module/other plugin. Check mode has no effect on lookups, so there is potential to perform many writes within your check mode run. Sometimes that is what you want: for example, if you're performing a ``vault_login`` via lookup to retrieve a token to use in your module calls, you may want that to still happen in check mode so that your module calls can properly check the things they need to. + +Some modules that are read focused, like the ``vault_read`` module, when used with auth other than ``token`` or ``none``, will still perform an internal login even in check mode, so this is still another consideration. + +Lookups and lazy templating +--------------------------- + +The capacity for lookups to perform writes or change state is exacerbated by Ansible's "lazy" templating, if not used carefully. + +Consider the following example: + +.. code-block:: yaml+jinja + + - vars: + token: "{{ lookup('community.hashi_vault.vault_login', auth_method='userpass', username='user', password='pass') | community.hashi_vault.vault_login_token }}" + secret: "{{ lookup('community.hashi_vault.vault_read', 'secrets/data/my-secret', token=token) }}" + value_a: "{{ secret.data.data.a }}" + value_b: "{{ secret.data.data.b }}" + ansible.builtin.debug: + msg: "Secret value A is '{{ value_a }}' while value B is '{{ value_b }}'." + +Since templating is recursive and evaluated lazily, this will unfortunately *not* result in a single login, reusing the token to perform a single secret read, which is then used in dictionary lookups. + +Instead, evaluation of ``value_a`` and ``value_b`` will *each* cause separate evaluation of ``secret``, so that lookup will be performed twice, and *each of those lookups* will cause a separate evaluation of ``token``, which will perform two separate logins, resulting in two tokens being created, and two reads of the exact same secret being performed. + +If you combine this with loops, or reusing vars over multiple tasks, you can very quickly multiply the number of requests being made to Vault, and in the case of writes, the number of objects being created. + +Tasks can be better for this, since they execute when encountered without being accidentally repeated, and the values they return are static. + +..
code-block:: yaml+jinja + + - name: login + community.hashi_vault.vault_login: + auth_method: userpass + username: user + password: pass + register: login + + - name: get secret + community.hashi_vault.vault_read: + token: '{{ login | community.hashi_vault.vault_login_token }}' + path: 'secrets/data/my-secret' + register: secret + + - vars: + value_a: "{{ secret.data.data.data.a }}" + value_b: "{{ secret.data.data.data.b }}" + ansible.builtin.debug: + msg: "Secret value A is '{{ value_a }}' while value B is '{{ value_b }}'." + +This example will do a single login and secret lookup, even though it is more verbose. It also means the ``secret`` and ``login`` variables can be re-used in more tasks without performing additional requests to Vault. + +Another thing to consider in both of the examples is that tasks run *per host*, so you may be multiplying the requests yet again. + +In the lookup example, those requests all happen on the controller, and in the module example, they happen on the remote host unless the play or task is targeted locally. + +In both cases, you may *want* to make those requests per host, because some of the variables involved in the lookups may rely on per-host values, like differing authentication, different secret paths, even different Vault servers altogether, or in the case of certain access restrictions, you may need the remote host to make the connection rather than the controller. + +But if all of your secret access is intended to be from the controller, and the requests do not depend on host-level variables, you can potentially cut your requests by a lot, by using ``run_once``, or making Vault calls in a separate play that only targets ``localhost`` and using ``ansible.builtin.set_fact``, or via other methods. diff --git a/ansible_collections/community/hashi_vault/docs/docsite/rst/migration_hashi_vault_lookup.rst b/ansible_collections/community/hashi_vault/docs/docsite/rst/migration_hashi_vault_lookup.rst new file mode 100644 index 000000000..9c206bcde --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/docsite/rst/migration_hashi_vault_lookup.rst @@ -0,0 +1,369 @@ +.. _ansible_collections.community.hashi_vault.docsite.migration_hashi_vault_lookup: + +***************************************** +Migrating from the ``hashi_vault`` lookup +***************************************** + +This is a guide for migrating from the ``hashi_vault`` :ref:`lookup plugin <ansible_collections.community.hashi_vault.hashi_vault_lookup>` to newer content in this collection. + +To understand why, please see :ref:`this page describing the plugin's history and future <ansible_collections.community.hashi_vault.docsite.about_hashi_vault_lookup>`. + +.. contents:: + :local: + :depth: 2 + +A note about lookups vs. modules +================================ + +Since the ``hashi_vault`` plugin is a lookup, it is often most straightforward to replace its use with other lookups. There was no module option available previously, however there is now. + +Although it may be more involved, consider each use case to determine if a module is more appropriate. + +For more information, see the :ref:`lookup guide <ansible_collections.community.hashi_vault.docsite.lookup_guide>`. + +General changes +=============== + +This section will cover some general differences not related to specific scenarios. + +Options: direct vs. 
term string +------------------------------- + +For a long time, the ``hashi_vault`` lookup took all of its options as ``name=value`` strings inside the term string, so you would do a lookup with a single string that looked something like ``secret/data/path auth_method=userpass username=my_user password=somepass``. + +This way of passing options is discouraged, and ``hashi_vault`` was updated (before this collection existed) to support passing options as individual keyword arguments. The term string method was kept for backward compatibility. + +.. note:: + + None of the other lookups in this collection will support the old style term string syntax, so changing to direct options is highly recommended. + +If your existing lookups use options in the term string, you may want to first change to direct use of options before trying to change the plugin, **especially if you intend to continue using lookups instead of modules**. + +Examples of the term string style: + +.. code-block:: yaml+jinja + + - name: Term string style + vars: + user: my_user + pass: '{{ my_secret_password }}' + mount: secret + relpath: path + ansible.builtin.debug: + msg: + - "Static: {{ lookup('community.hashi_vault.hashi_vault', 'secret/data/path auth_method=userpass username=my_user password=somepass') }}" + - "Variables: {{ lookup('community.hashi_vault.hashi_vault', mount ~ '/data/' ~ relpath ~ ' auth_method=userpass username=' ~ user ~ ' password=' ~ pass) }}" + # note these necessary but easy to miss spaces ^ ^ + +And the same lookups converted to direct options: + +.. code-block:: yaml+jinja + + - name: Direct option style + vars: + user: my_user + pass: '{{ my_secret_password }}' + mount: secret + relpath: path + ansible.builtin.debug: + msg: + - "Static: {{ lookup('community.hashi_vault.hashi_vault', 'secret/data/path', auth_method='userpass', username='my_user', password='somepass') }}" + - "Variables: {{ lookup('community.hashi_vault.hashi_vault', mount ~ '/data/' ~ relpath, auth_method='userpass', username=user, password=pass) }}" + + +Key dereferencing +----------------- + +For these examples we will assume our result dictionary has this structure: + +.. code-block:: yaml + + key_1: value1 + 'key-2': 2 + 'key three': three + + +``hashi_vault`` also supported a dictionary dereferencing syntax with colon ``:``, so it was common to see this: + +.. code-block:: yaml+jinja + + - ansible.builtin.debug: + msg: + - "KV1 (key1): {{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret:key_1') }}" + - "KV2 (key1): {{ lookup('community.hashi_vault.hashi_vault', 'kv2_mount/data/path/to/secret:key_1') }}" + +With the above syntax, only the *value* of ``key_1`` is returned. Note that ``key three`` could not have been retrieved this way, because the space was the delimiter for the term string options. + +.. note:: + + The colon ``:`` syntax is not supported in any other lookups in the collection, and its use is discouraged. + +**Colon** ``:`` **use does not correspond to any server-side filtering or other optimization**, so other than compact syntax there is no advantage to using it. + +The colon ``:`` syntax could always have been replaced by directly dereferencing in the Jinja2 template. Direct dereferencing can be done with the Jinja2 dot ``.`` syntax (which has restrictions on the key names) or via square brackets ``[]``, like so (KV version does not matter): + +..
code-block:: yaml+jinja + + - vars: + k1: key_1 + k2: key-2 + k3: key three + ansible.builtin.debug: + msg: + - "KV1 (key1, dot): {{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret').key_1 }}" + - "KV1 (key1, [ ]): {{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret')['key_1'] }}" + - "KV1 (var1, [ ]): {{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret')[k1] }}" + - "KV1 (key2, [ ]): {{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret')['key-2'] }}" + - "KV1 (var2, [ ]): {{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret')[k2] }}" + - "KV1 (key3, [ ]): {{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret')['key three'] }}" + - "KV1 (var3, [ ]): {{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret')[k3] }}" + +Note that only ``key_1`` could use the dot ``.`` syntax because the allowed characters for that are limited to those allowed for Python symbols. Variables also cannot be used with dot ``.`` access. + +Furthermore, the colon ``:`` syntax encouraged multiple lookups to the same secret only for the purpose of getting different keys, leading to multiple identical requests to Vault. **The above example also suffers from this**. + +A more DRY approach might look like this: + +.. code-block:: yaml+jinja + + - vars: + secret: "{{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret') }}" + k1: key_1 + k2: key-2 + k3: key three + ansible.builtin.debug: + msg: + - "KV1 (key1, dot): {{ secret.key_1 }}" + - "KV1 (key1, [ ]): {{ secret['key_1'] }}" + - "KV1 (var1, [ ]): {{ secret[k1] }}" + - "KV1 (key2, [ ]): {{ secret['key-2'] }}" + - "KV1 (var2, [ ]): {{ secret[k2] }}" + - "KV1 (key3, [ ]): {{ secret['key three'] }}" + - "KV1 (var3, [ ]): {{ secret[k3] }}" + +This looks a lot better, and it is from a readability perspective, but **in fact it will operate exactly the same way**, making a new request on every reference to ``secret``. This is due to lazy template evaluation in Ansible, and is discussed in more detail in the :ref:`lookup guide <ansible_collections.community.hashi_vault.docsite.lookup_guide>`. This can be remedied by either using ``ansible.builtin.set_fact`` to set the ``secret`` variable, or by using a module to do the read. + +If you have extensive use of the colon ``:`` syntax, updating it before moving onto other plugins is recommended. + +Return format +------------- + +.. note:: + + The ``return_format`` option will not be supported in other plugins. It is recommended to replace it with Jinja2 if you are using it currently. + +The ``hashi_vault`` lookup takes a ``return_format`` option that defaults to ``dict``. The lookup always looks for a ``data`` field (see the :ref:`KV response details <ansible_collections.community.hashi_vault.docsite.migration_hashi_vault_lookup.kv_response>` for more information), and that is what is returned by default. + +The ``raw`` value for ``return_format`` gives the raw API response from the request. This can be used to get the metadata from a KV2 request for example, which is usually stripped off, or it can be used to read from a non-KV path whose response happens to look like a KV response (with one or more ``data`` structures), and gets interpreted as one as a result. + +For reading non-KV paths :ref:`other options are available <ansible_collections.community.hashi_vault.docsite.migration_hashi_vault_lookup.non_kv_replacements>`. 
+ +For getting access to KV2 metadata, see the section on :ref:`KV replacements <ansible_collections.community.hashi_vault.docsite.migration_hashi_vault_lookup.kv_replacements>`. + +The ``return_format`` option can also be set to ``values`` to return a list of the dictionary's values. + +This can be replaced with Jinja2. We will use our example secret again: + +.. code-block:: yaml + + key_1: value1 + 'key-2': 2 + 'key three': three + +And look at uses with ``return_format``: + +.. code-block:: yaml+jinja + + # show a list of values, ['value1', 2, 'three'] + - ansible.builtin.debug: + msg: + - "KV1: {{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret', return_format='values') }}" + + # run debug once for each value + - ansible.builtin.debug: + msg: "{{ item }}" + loop: "{{ query('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret', return_format='values') }}" + +We can do the same with Jinja2: + +.. code-block:: yaml+jinja + + # show a list of values + - ansible.builtin.debug: + msg: + - "KV1: {{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret').values() | list }}" + + # run debug once for each value + - ansible.builtin.debug: + msg: "{{ item }}" + loop: "{{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret').values() | list }}" + + +Vault KV reads +============== + +The most common use for the ``hashi_vault`` lookup is reading secrets from the KV secret store. + +.. code-block:: yaml+jinja + + - ansible.builtin.debug: + msg: + - "KV1: {{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret') }}" + - "KV2: {{ lookup('community.hashi_vault.hashi_vault', 'kv2_mount/data/path/to/secret') }}" + +The return value of both of those is the dictionary of the key/value pairs in the secret, with no additional information from the API response, nor the metadata (in the case of KV2). + +.. _ansible_collections.community.hashi_vault.docsite.migration_hashi_vault_lookup.kv_response: + +KV1 and KV2 response structure +------------------------------ + +Under the hood, the return format of version 1 and version 2 of the KV store differs. + +Here is a sample KV1 response: + +.. code-block:: json + + { + "auth": null, + "data": { + "Key1": "val1", + "Key2": "val2" + }, + "lease_duration": 2764800, + "lease_id": "", + "renewable": false, + "request_id": "e26a7521-e512-82f1-3998-7cc494f14e86", + "warnings": null, + "wrap_info": null + } + +And a sample KV2 response: + +.. code-block:: json + + { + "auth": null, + "data": { + "data": { + "Key1": "val1", + "Key2": "val2" + }, + "metadata": { + "created_time": "2022-04-21T15:56:58.8525402Z", + "custom_metadata": null, + "deletion_time": "", + "destroyed": false, + "version": 2 + } + }, + "lease_duration": 0, + "lease_id": "", + "renewable": false, + "request_id": "15538d55-0ad9-1c39-2f4b-dcbb982f13cc", + "warnings": null, + "wrap_info": null + } + +The ``hashi_vault`` lookup traditionally returned the ``data`` field of whatever it was reading, and then later the plugin was updated to its current behavior, where it looks for the nested ``data.data`` structure, and if found, it returns only the inner ``data``. This aims to always return the secret data from KV1 and KV2 in a consistent format, but it means any additional information from KV2's metadata could not be accessed. + +KV1 and KV2 API paths +--------------------- + +KV1's API path had the secret paths directly concatenated to the mount point. 
So for example, if a KV1 engine is mounted at ``kv/v/1`` (mount paths can contain ``/``), and a secret was created in that store at ``app/deploy_key``, the path would be ``kv/v/1/app/deploy_key``. + +In KV2, there are separate paths that deal with the data and the metadata of a secret, so an additional ``/data/`` or ``/metadata/`` component needs to be inserted between the mount and the path. + +For example, with a KV2 store mounted at ``kv/v/2``, and a secret at ``app/deploy_key``, the path to read the secret data is ``kv/v/2/data/app/deploy_key``. For metadata operations it would be ``kv/v/2/metadata/app/deploy_key``. + +Since ``hashi_vault`` does a generic read to an API path, anyone using it must know to insert those into the path, which causes a lot of confusion. + +KV2 secret versions +------------------- + +Since KV2 is a versioned secret store, multiple versions of the same secret usually exist. There was no dedicated way to get anything but the latest secret (default) with the ``hashi_vault`` lookup, but docs suggested that ``?version=2`` could be added to the path to get secret version 2. This did work, but it directly modified the API path, so it was not considered a stable option. The dedicated KV2 content in the collection supports this as a first-class option. + + +.. _ansible_collections.community.hashi_vault.docsite.migration_hashi_vault_lookup.kv_replacements: + +KV get replacements +------------------- + +As of collection version 2.5.0, the ``vault_kv1_get`` and ``vault_kv2_get`` lookups and modules were added: + + * ``vault_kv1_get`` :ref:`lookup <ansible_collections.community.hashi_vault.vault_kv1_get_lookup>` + * ``vault_kv2_get`` :ref:`lookup <ansible_collections.community.hashi_vault.vault_kv2_get_lookup>` + * ``vault_kv1_get`` :ref:`module <ansible_collections.community.hashi_vault.vault_kv1_get_module>` + * ``vault_kv2_get`` :ref:`module <ansible_collections.community.hashi_vault.vault_kv2_get_module>` + +These dedicated plugins clearly separate KV1 and KV2 operations. This ensures their behavior is clear and predictable. + +As it relates to API paths, these plugins take the approach used by most Vault client libraries and recommended by HashiCorp, which is to accept the mount point as an option (``engine_mount_point``), separate from the path to be read. This ensures a proper path will be constructed internally, and does not require the caller to insert ``/data/`` on KV2. + +For return values, the KV plugins no longer return a direct secret. Instead, the return values from KV1 and KV2, and both the module and lookup forms, have been unified to give easy access to the secret, the full API response, and other parts of the response discretely. + +The return values are covered directly in the documentation for each plugin in the return and examples sections. + +Examples +-------- + +Here are some before and after KV examples. + +We will go back to our sample secret: + +.. code-block:: yaml + + key_1: value1 + 'key-2': 2 + 'key three': three + +And some usage: + +..
+.. code-block:: yaml+jinja + + - name: Reading secrets with hashi_vault and colon dereferencing + ansible.builtin.debug: + msg: + - "KV1 (key1): {{ lookup('community.hashi_vault.hashi_vault', 'kv1_mount/path/to/secret:key_1') }}" + - "KV2 (key1): {{ lookup('community.hashi_vault.hashi_vault', 'kv2_mount/data/path/to/secret:key_1') }}" + + - name: Replacing the above + ansible.builtin.debug: + msg: + - "KV1 (key1): {{ lookup('community.hashi_vault.vault_kv1_get', 'path/to/secret', engine_mount_point='kv1_mount').secret.key_1 }}" + - "KV2 (key1): {{ lookup('community.hashi_vault.vault_kv2_get', 'path/to/secret', engine_mount_point='kv2_mount').secret.key_1 }}" + + - name: Reading secret version 7 (old) + ansible.builtin.debug: + msg: + - "KV2 (v7): {{ lookup('community.hashi_vault.hashi_vault', 'kv2_mount/data/path/to/secret?version=7') }}" + + - name: Reading secret version 7 (new) + ansible.builtin.debug: + msg: + - "KV2 (v7): {{ lookup('community.hashi_vault.vault_kv2_get', 'path/to/secret', engine_mount_point='kv2_mount', version=7).secret }}" + + - name: Reading KV2 metadata (old) + ansible.builtin.debug: + msg: + - "KV2 (metadata): {{ lookup('community.hashi_vault.hashi_vault', 'kv2_mount/data/path/to/secret', return_format='raw').data.metadata }}" + + - name: Reading KV2 metadata (new) + ansible.builtin.debug: + msg: + - "KV2 (metadata): {{ lookup('community.hashi_vault.vault_kv2_get', 'path/to/secret', engine_mount_point='kv2_mount').metadata }}" + + +.. _ansible_collections.community.hashi_vault.docsite.migration_hashi_vault_lookup.non_kv_replacements: + +General reads (non-KV) +====================== + +Since the ``hashi_vault`` lookup does a generic read internally, it can be used to read other paths that are not KV-specific, for example reading from a cubbyhole or retrieving an AppRole's role ID. + +More specific-purpose content is expected in the future, for example plugins for retrieving a role ID, but for anything not covered right now, we have the ``vault_read`` lookup and module: + + * ``vault_read`` :ref:`lookup <ansible_collections.community.hashi_vault.vault_read_lookup>` + * ``vault_read`` :ref:`module <ansible_collections.community.hashi_vault.vault_read_module>` + +These always do a direct read, and return a raw result, without trying to do any additional interpretation of the response. See their documentation for examples.
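+
+For instance, reading an AppRole's role ID (mentioned above) with the ``vault_read`` lookup might look like the sketch below. This is only an illustration: the AppRole mount name ``approle``, the role name ``my-role``, and the dereferencing of the raw response are assumptions based on the standard Vault API response shape.
+
+.. code-block:: yaml+jinja
+
+   # a minimal sketch; adjust the mount and role name for your environment
+   - name: Retrieve an AppRole role ID with a generic read
+     ansible.builtin.debug:
+       msg: "{{ lookup('community.hashi_vault.vault_read', 'auth/approle/role/my-role/role-id').data.role_id }}"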
diff --git a/ansible_collections/community/hashi_vault/docs/docsite/rst/user_guide.rst new file mode 100644 index 000000000..a3f417800 --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/docsite/rst/user_guide.rst @@ -0,0 +1,90 @@ +.. _ansible_collections.community.hashi_vault.docsite.user_guide: + +********** +User guide +********** + +The `community.hashi_vault collection <https://galaxy.ansible.com/community/hashi_vault>`_ offers Ansible content for working with `HashiCorp Vault <https://www.vaultproject.io/>`_. + +.. note:: + + This guide is a work-in-progress and should not be considered complete. Use it in conjunction with plugin documentation. + +.. contents:: + :local: + :depth: 1 + + +.. _ansible_collections.community.hashi_vault.docsite.user_guide.requirements: + +Requirements +============ + +The content in ``community.hashi_vault`` requires the `hvac <https://hvac.readthedocs.io/en/stable/>`_ library. + +.. code-block:: shell-session + + $ pip install hvac + +``hvac`` version specifics +-------------------------- + +In general, we recommend using the latest version of ``hvac`` that is supported for your given Python version because that is what we test against. Where possible we will try to list version-specific restrictions here, but this list may not be exhaustive. + +* ``hvac`` 0.7.0+ (for Azure auth and namespace support) +* ``hvac`` 0.9.6+ (to avoid most deprecation warnings) +* ``hvac`` 0.10.5+ (for JWT auth) +* ``hvac`` 0.10.6+ (to avoid deprecation warning for AppRole) +* ``hvac`` 0.10.12+ (for cert auth) + +Other requirements +------------------ + +* ``boto3`` (only if loading credentials from a boto session, for example using an AWS profile or IAM role credentials) +* ``azure-identity`` (only if using a service principal or managed identity) + +Retrying failed requests +======================== + +Via the ``retries`` parameter, you can control what happens when a request to Vault fails, and automatically retry certain requests. Retries are based on the `urllib3 Retry class <https://urllib3.readthedocs.io/en/latest/reference/urllib3.util.html#urllib3.util.Retry>`_ and so all of its options are supported. + +Retries are disabled by default. + +In ``community.hashi_vault`` you can specify the ``retries`` parameter in two ways: + +* Set a number (integer), where ``0`` disables retries and any positive number sets the number of tries, with the rest of the retry parameters using the collection defaults. +* Set a dictionary, where you can set any field that the ``Retry`` class can be initialized with, in order to fully customize your retry experience. + + +About the collection defaults +----------------------------- + +The collection uses its own set of recommended defaults for retries, including which HTTP status codes to retry, which HTTP methods are subject to retries, and the backoff factor used. **These defaults are subject to change at any time (in any release) and won't be considered breaking changes.** By setting ``retries`` to a number you are opting in to trust the defaults in the collection. To enable retries with full control over their behavior, be sure to specify a dictionary. + +Current Defaults (always check the source code to confirm the defaults in your specific collection version): + +.. code-block:: yaml + + status_forcelist: + # https://www.vaultproject.io/api#http-status-codes + # 429 is usually a "too many requests" status, but in Vault it's the default health status response for standby nodes. + - 412 # Precondition failed. Returned on Enterprise when a request can't be processed yet due to some missing eventually consistent data. Should be retried, perhaps with a little backoff. + - 500 # Internal server error. An internal error has occurred, try again later. If the error persists, report a bug. + - 502 # A request to Vault required Vault making a request to a third party; the third party responded with an error of some kind. + - 503 # Vault is down for maintenance or is currently sealed. Try again later. + allowed_methods: null # None allows retries on all methods, including those which may not be considered idempotent, like POST + backoff_factor: 0.3 + +Any of the ``Retry`` class's parameters that are not specified in the collection defaults or in your custom dictionary are initialized using the class's defaults, with one exception: the ``raise_on_status`` parameter is always set to ``false`` unless you explicitly add it to your custom dictionary. The reason is that this lets our error handling look for the expected ``hvac`` exceptions, instead of the ``Retry``-specific exceptions. It is recommended that you don't override this as it may cause unexpected error messages on common failures if they are retried.
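+
+For example, enabling retries on a task might look like the sketch below. This is only an illustration: the Vault address and the path ``secret/data/app`` are assumptions, and other required options (such as authentication) are omitted.
+
+.. code-block:: yaml
+
+   # opt in to the collection's retry defaults by passing a number
+   - community.hashi_vault.vault_read:
+       url: https://vault.example.com:8200
+       path: secret/data/app
+       retries: 3
+
+   # take full control by passing a dictionary of Retry class fields
+   - community.hashi_vault.vault_read:
+       url: https://vault.example.com:8200
+       path: secret/data/app
+       retries:
+         total: 6
+         backoff_factor: 0.9
+         status_forcelist: [500, 502]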
+ +Controlling retry warnings +-------------------------- + +By default, if a retry is performed, a warning will be emitted that shows how many retries are remaining. This can be controlled with the ``retry_action`` option, which defaults to ``warn``. It is recommended to keep this enabled unless you have other processes that will be thrown off by the warning output. + +A note about timeouts +--------------------- + +Consider setting the ``timeout`` option appropriately when using retries, as a connection timeout doesn't count toward the time between retries (backoff). A long timeout can cause very long delays for a connection that isn't going to recover, multiplied by the number of retries. + +However, also consider the type of request being made, and the auth method in use. Because Vault auth methods may have their own dependencies on other systems (an LDAP server, a cloud provider like AWS, a required MFA prompt that depends on a human to respond), the time to complete a request could be quite long, and setting a timeout too short will prevent an otherwise successful request from completing. diff --git a/ansible_collections/community/hashi_vault/docs/preview/.gitignore b/ansible_collections/community/hashi_vault/docs/preview/.gitignore new file mode 100644 index 000000000..2def98f08 --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/preview/.gitignore @@ -0,0 +1,7 @@ +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +/temp-rst +/build +/rst/collections diff --git a/ansible_collections/community/hashi_vault/docs/preview/antsibull-docs.cfg b/ansible_collections/community/hashi_vault/docs/preview/antsibull-docs.cfg new file mode 100644 index 000000000..9714411eb --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/preview/antsibull-docs.cfg @@ -0,0 +1,22 @@ +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +breadcrumbs = true +indexes = true +use_html_blobs = false + +# You can specify ways to convert a collection name (<namespace>.<name>) to an URL here. +# You can replace either of <namespace> or <name> by "*" to match all values in that place, +# or use "*" for the collection name to match all collections. In the URL, you can use +# {namespace} and {name} for the two components of the collection name. If you want to use +# "{" or "}" in the URL, write "{{" or "}}" instead. Basically these are Python format +# strings (https://docs.python.org/3.8/library/string.html#formatstrings). +collection_url = { + * = "https://galaxy.ansible.com/{namespace}/{name}" +} + +# The same wildcard rules and formatting rules as for collection_url apply.
+collection_install = { + * = "ansible-galaxy collection install {namespace}.{name}" +} diff --git a/ansible_collections/community/hashi_vault/docs/preview/build.sh b/ansible_collections/community/hashi_vault/docs/preview/build.sh new file mode 100755 index 000000000..eeb0c4f8e --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/preview/build.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +set -e +pushd "${BASH_SOURCE%/*}" + +# Create collection documentation into temporary directory +rm -rf temp-rst +mkdir -p temp-rst +antsibull-docs \ + --config-file antsibull-docs.cfg \ + collection \ + --use-current \ + --dest-dir temp-rst \ + community.hashi_vault + +# Copy collection documentation into source directory +rsync -cprv --delete-after temp-rst/collections/ rst/collections/ + +# Build Sphinx site +sphinx-build -M html rst build -c . -W --keep-going + +popd diff --git a/ansible_collections/community/hashi_vault/docs/preview/conf.py b/ansible_collections/community/hashi_vault/docs/preview/conf.py new file mode 100644 index 000000000..02685b344 --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/preview/conf.py @@ -0,0 +1,42 @@ +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +# This file only contains a selection of the most common options. For a full list see the +# documentation: +# http://www.sphinx-doc.org/en/master/config + +project = 'Ansible collections' +copyright = 'Ansible contributors' + +title = 'Ansible Collections Documentation' +html_short_title = 'Ansible Collections Documentation' + +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx_antsibull_ext'] + +pygments_style = 'ansible' + +highlight_language = 'YAML+Jinja' + +html_theme = 'sphinx_ansible_theme' +html_show_sphinx = False + +display_version = False + +html_use_smartypants = True +html_use_modindex = False +html_use_index = False +html_copy_source = False + +intersphinx_mapping = { + 'python': ('https://docs.python.org/2/', (None, '../python2.inv')), + 'python3': ('https://docs.python.org/3/', (None, '../python3.inv')), + 'jinja2': ('http://jinja.palletsprojects.com/', (None, '../jinja2.inv')), + 'ansible_devel': ('https://docs.ansible.com/ansible/devel/', (None, '../ansible_devel.inv')), + # If you want references to resolve to a released Ansible version (say, `5`), uncomment and replace X by this version: + # 'ansibleX': ('https://docs.ansible.com/ansible/X/', (None, '../ansibleX.inv')), +} + +default_role = 'any' + +nitpicky = True diff --git a/ansible_collections/community/hashi_vault/docs/preview/requirements.txt b/ansible_collections/community/hashi_vault/docs/preview/requirements.txt new file mode 100644 index 000000000..afc7e88e8 --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/preview/requirements.txt @@ -0,0 +1,8 @@ +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +antsibull-docs >= 1.0.0, < 2.0.0 +ansible-pygments +sphinx != 5.2.0.post0 # temporary, see https://github.com/ansible-community/antsibull-docs/issues/39, 
https://github.com/ansible-community/antsibull-docs/issues/40 +sphinx-ansible-theme >= 0.9.0 diff --git a/ansible_collections/community/hashi_vault/docs/preview/rst/index.rst b/ansible_collections/community/hashi_vault/docs/preview/rst/index.rst new file mode 100644 index 000000000..19db644bc --- /dev/null +++ b/ansible_collections/community/hashi_vault/docs/preview/rst/index.rst @@ -0,0 +1,25 @@ +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +.. _docsite_root_index: + +Ansible collection documentation preview +======================================== + +This docsite contains documentation for ``community.hashi_vault``. + + +.. toctree:: + :maxdepth: 2 + :caption: Collections: + + collections/index + + +.. toctree:: + :maxdepth: 1 + :caption: Plugin indexes: + :glob: + + collections/index_* diff --git a/ansible_collections/community/hashi_vault/meta/ee-requirements.txt b/ansible_collections/community/hashi_vault/meta/ee-requirements.txt new file mode 100644 index 000000000..53393fd3e --- /dev/null +++ b/ansible_collections/community/hashi_vault/meta/ee-requirements.txt @@ -0,0 +1,9 @@ +# ansible-builder doesn't seem to properly handle "; python_version" type of constraints +# requirements here are assuming python 3.6 or higher +hvac >=0.10.6 +urllib3 >= 1.15 + +boto3 # these are only needed if inferring AWS credentials or +botocore # using a boto profile; including for completeness + +azure-identity # only needed when using a service principal or managed identity diff --git a/ansible_collections/community/hashi_vault/meta/execution-environment.yml b/ansible_collections/community/hashi_vault/meta/execution-environment.yml new file mode 100644 index 000000000..c89949397 --- /dev/null +++ b/ansible_collections/community/hashi_vault/meta/execution-environment.yml @@ -0,0 +1,4 @@ +--- +version: 1 +dependencies: + python: meta/ee-requirements.txt diff --git a/ansible_collections/community/hashi_vault/meta/runtime.yml b/ansible_collections/community/hashi_vault/meta/runtime.yml new file mode 100644 index 000000000..02b09ecd2 --- /dev/null +++ b/ansible_collections/community/hashi_vault/meta/runtime.yml @@ -0,0 +1,15 @@ +--- +requires_ansible: '>=2.11.0' +action_groups: + # let's keep this in alphabetical order + vault: + - vault_kv1_get + - vault_kv2_delete + - vault_kv2_get + - vault_kv2_write + - vault_list + - vault_login + - vault_pki_generate_certificate + - vault_read + - vault_token_create + - vault_write diff --git a/ansible_collections/community/hashi_vault/plugins/doc_fragments/attributes.py b/ansible_collections/community/hashi_vault/plugins/doc_fragments/attributes.py new file mode 100644 index 000000000..7536fa794 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/doc_fragments/attributes.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r''' +options: {} +attributes: + check_mode: + description: Can run in C(check_mode) and return changed status prediction without modifying target.
+''' + + ACTION_GROUP = r''' +options: {} +attributes: + action_group: + description: Use C(group/community.hashi_vault.vault) in C(module_defaults) to set defaults for this module. + support: full + membership: + - community.hashi_vault.vault +''' + + # Should be used together with the standard fragment + CHECK_MODE_READ_ONLY = r''' +options: {} +attributes: + check_mode: + support: full + details: + - This module is "read only" and operates the same regardless of check mode. +''' diff --git a/ansible_collections/community/hashi_vault/plugins/doc_fragments/auth.py b/ansible_collections/community/hashi_vault/plugins/doc_fragments/auth.py new file mode 100644 index 000000000..8c6bd8760 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/doc_fragments/auth.py @@ -0,0 +1,308 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2021, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r''' + options: + auth_method: + description: + - Authentication method to be used. + - C(none) auth method was added in collection version C(1.2.0). + - C(cert) auth method was added in collection version C(1.4.0). + - C(aws_iam_login) was renamed C(aws_iam) in collection version C(2.1.0) and was removed in C(3.0.0). + - C(azure) auth method was added in collection version C(3.2.0). + choices: + - token + - userpass + - ldap + - approle + - aws_iam + - azure + - jwt + - cert + - none + default: token + type: str + mount_point: + description: + - Vault mount point. + - If not specified, the default mount point for a given auth method is used. + - Does not apply to token authentication. + type: str + token: + description: + - Vault token. Token may be specified explicitly, through the listed [env] vars, and also through the C(VAULT_TOKEN) env var. + - If no token is supplied, explicitly or through env, then the plugin will check for a token file, as determined by I(token_path) and I(token_file). + - The order of token loading (first found wins) is C(token param -> ansible var -> ANSIBLE_HASHI_VAULT_TOKEN -> VAULT_TOKEN -> token file). + type: str + token_path: + description: If no token is specified, will try to read the I(token_file) from this path. + type: str + token_file: + description: If no token is specified, will try to read the token from this file in I(token_path). + default: '.vault-token' + type: str + token_validate: + description: + - For token auth, will perform a C(lookup-self) operation to determine the token's validity before using it. + - Disable if your token does not have the C(lookup-self) capability. + type: bool + default: false + version_added: 0.2.0 + username: + description: Authentication user name. + type: str + password: + description: Authentication password. + type: str + role_id: + description: + - Vault Role ID or name. Used in C(approle), C(aws_iam), C(azure) and C(cert) auth methods. + - For C(cert) auth, if no I(role_id) is supplied, the default behavior is to try all certificate roles and return any one that matches. + - For C(azure) auth, I(role_id) is required. + type: str + secret_id: + description: Secret ID to be used for Vault AppRole authentication. + type: str + jwt: + description: The JSON Web Token (JWT) to use for JWT authentication to Vault. 
+ type: str + aws_profile: + description: The AWS profile + type: str + aliases: [ boto_profile ] + aws_access_key: + description: The AWS access key to use. + type: str + aliases: [ aws_access_key_id ] + aws_secret_key: + description: The AWS secret key that corresponds to the access key. + type: str + aliases: [ aws_secret_access_key ] + aws_security_token: + description: The AWS security token if using temporary access and secret keys. + type: str + region: + description: The AWS region for which to create the connection. + type: str + aws_iam_server_id: + description: If specified, sets the value to use for the C(X-Vault-AWS-IAM-Server-ID) header as part of C(GetCallerIdentity) request. + required: False + type: str + version_added: '0.2.0' + azure_tenant_id: + description: + - The Azure Active Directory Tenant ID (also known as the Directory ID) of the service principal. Should be a UUID. + - >- + Required when using a service principal to authenticate to Vault, + e.g. required when both I(azure_client_id) and I(azure_client_secret) are specified. + - Optional when using managed identity to authenticate to Vault. + required: False + type: str + version_added: '3.2.0' + azure_client_id: + description: + - The client ID (also known as application ID) of the Azure AD service principal or managed identity. Should be a UUID. + - If not specified, will use the system assigned managed identity. + required: False + type: str + version_added: '3.2.0' + azure_client_secret: + description: The client secret of the Azure AD service principal. + required: False + type: str + version_added: '3.2.0' + azure_resource: + description: The resource URL for the application registered in Azure Active Directory. Usually should not be changed from the default. + required: False + type: str + default: https://management.azure.com/ + version_added: '3.2.0' + cert_auth_public_key: + description: For C(cert) auth, path to the certificate file to authenticate with, in PEM format. + type: path + version_added: 1.4.0 + cert_auth_private_key: + description: For C(cert) auth, path to the private key file to authenticate with, in PEM format. 
+ type: path + version_added: 1.4.0 + ''' + + PLUGINS = r''' + options: + auth_method: + env: + - name: ANSIBLE_HASHI_VAULT_AUTH_METHOD + version_added: 0.2.0 + ini: + - section: hashi_vault_collection + key: auth_method + version_added: 1.4.0 + vars: + - name: ansible_hashi_vault_auth_method + version_added: 1.2.0 + mount_point: + env: + - name: ANSIBLE_HASHI_VAULT_MOUNT_POINT + version_added: 1.5.0 + ini: + - section: hashi_vault_collection + key: mount_point + version_added: 1.5.0 + vars: + - name: ansible_hashi_vault_mount_point + version_added: 1.5.0 + token: + env: + - name: ANSIBLE_HASHI_VAULT_TOKEN + version_added: 0.2.0 + vars: + - name: ansible_hashi_vault_token + version_added: 1.2.0 + token_path: + env: + - name: ANSIBLE_HASHI_VAULT_TOKEN_PATH + version_added: 0.2.0 + ini: + - section: hashi_vault_collection + key: token_path + version_added: 1.4.0 + vars: + - name: ansible_hashi_vault_token_path + version_added: 1.2.0 + token_file: + env: + - name: ANSIBLE_HASHI_VAULT_TOKEN_FILE + version_added: 0.2.0 + ini: + - section: hashi_vault_collection + key: token_file + version_added: 1.4.0 + vars: + - name: ansible_hashi_vault_token_file + version_added: 1.2.0 + token_validate: + env: + - name: ANSIBLE_HASHI_VAULT_TOKEN_VALIDATE + ini: + - section: hashi_vault_collection + key: token_validate + version_added: 1.4.0 + vars: + - name: ansible_hashi_vault_token_validate + version_added: 1.2.0 + username: + env: + - name: ANSIBLE_HASHI_VAULT_USERNAME + version_added: '1.2.0' + vars: + - name: ansible_hashi_vault_username + version_added: '1.2.0' + password: + env: + - name: ANSIBLE_HASHI_VAULT_PASSWORD + version_added: '1.2.0' + vars: + - name: ansible_hashi_vault_password + version_added: '1.2.0' + role_id: + env: + - name: ANSIBLE_HASHI_VAULT_ROLE_ID + version_added: 0.2.0 + ini: + - section: hashi_vault_collection + key: role_id + version_added: 1.4.0 + vars: + - name: ansible_hashi_vault_role_id + version_added: 1.2.0 + secret_id: + env: + - name: ANSIBLE_HASHI_VAULT_SECRET_ID + version_added: 0.2.0 + vars: + - name: ansible_hashi_vault_secret_id + version_added: 1.2.0 + jwt: + env: + - name: ANSIBLE_HASHI_VAULT_JWT + aws_profile: + env: + - name: AWS_DEFAULT_PROFILE + - name: AWS_PROFILE + aws_access_key: + env: + - name: EC2_ACCESS_KEY + - name: AWS_ACCESS_KEY + - name: AWS_ACCESS_KEY_ID + aws_secret_key: + env: + - name: EC2_SECRET_KEY + - name: AWS_SECRET_KEY + - name: AWS_SECRET_ACCESS_KEY + aws_security_token: + env: + - name: EC2_SECURITY_TOKEN + - name: AWS_SESSION_TOKEN + - name: AWS_SECURITY_TOKEN + region: + env: + - name: EC2_REGION + - name: AWS_REGION + aws_iam_server_id: + env: + - name: ANSIBLE_HASHI_VAULT_AWS_IAM_SERVER_ID + ini: + - section: hashi_vault_collection + key: aws_iam_server_id + version_added: 1.4.0 + azure_tenant_id: + env: + - name: ANSIBLE_HASHI_VAULT_AZURE_TENANT_ID + ini: + - section: hashi_vault_collection + key: azure_tenant_id + vars: + - name: ansible_hashi_vault_azure_tenant_id + azure_client_id: + env: + - name: ANSIBLE_HASHI_VAULT_AZURE_CLIENT_ID + ini: + - section: hashi_vault_collection + key: azure_client_id + vars: + - name: ansible_hashi_vault_azure_client_id + azure_client_secret: + env: + - name: ANSIBLE_HASHI_VAULT_AZURE_CLIENT_SECRET + vars: + - name: ansible_hashi_vault_azure_client_secret + azure_resource: + env: + - name: ANSIBLE_HASHI_VAULT_AZURE_RESOURCE + ini: + - section: hashi_vault_collection + key: azure_resource + vars: + - name: ansible_hashi_vault_azure_resource + cert_auth_public_key: + env: + - name: 
ANSIBLE_HASHI_VAULT_CERT_AUTH_PUBLIC_KEY + ini: + - section: hashi_vault_collection + key: cert_auth_public_key + cert_auth_private_key: + env: + - name: ANSIBLE_HASHI_VAULT_CERT_AUTH_PRIVATE_KEY + ini: + - section: hashi_vault_collection + key: cert_auth_private_key + ''' diff --git a/ansible_collections/community/hashi_vault/plugins/doc_fragments/connection.py b/ansible_collections/community/hashi_vault/plugins/doc_fragments/connection.py new file mode 100644 index 000000000..e7ab8d07a --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/doc_fragments/connection.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2021, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r''' + options: + url: + description: + - URL to the Vault service. + - If not specified by any other means, the value of the C(VAULT_ADDR) environment variable will be used. + - If C(VAULT_ADDR) is also not defined then an error will be raised. + type: str + proxies: + description: + - URL(s) to the proxies used to access the Vault service. + - It can be a string or a dict. + - If it's a dict, provide the scheme (eg. C(http) or C(https)) as the key, and the URL as the value. + - If it's a string, provide a single URL that will be used as the proxy for both C(http) and C(https) schemes. + - A string that can be interpreted as a dictionary will be converted to one (see examples). + - You can specify a different proxy for HTTP and HTTPS resources. + - If not specified, L(environment variables from the Requests library,https://requests.readthedocs.io/en/master/user/advanced/#proxies) are used. + type: raw + version_added: 1.1.0 + ca_cert: + description: + - Path to certificate to use for authentication. + - If not specified by any other means, the C(VAULT_CACERT) environment variable will be used. + aliases: [ cacert ] + type: str + validate_certs: + description: + - Controls verification and validation of SSL certificates, mostly you only want to turn off with self signed ones. + - Will be populated with the inverse of C(VAULT_SKIP_VERIFY) if that is set and I(validate_certs) is not explicitly provided. + - Will default to C(true) if neither I(validate_certs) or C(VAULT_SKIP_VERIFY) are set. + type: bool + namespace: + description: + - Vault namespace where secrets reside. This option requires HVAC 0.7.0+ and Vault 0.11+. + - Optionally, this may be achieved by prefixing the authentication mount point and/or secret path with the namespace + (e.g C(mynamespace/secret/mysecret)). + - If environment variable C(VAULT_NAMESPACE) is set, its value will be used last among all ways to specify I(namespace). + type: str + timeout: + description: + - Sets the connection timeout in seconds. + - If not set, then the C(hvac) library's default is used. + type: int + version_added: 1.3.0 + retries: + description: + - "Allows for retrying on errors, based on + the L(Retry class in the urllib3 library,https://urllib3.readthedocs.io/en/latest/reference/urllib3.util.html#urllib3.util.Retry)." + - This collection defines recommended defaults for retrying connections to Vault. + - This option can be specified as a positive number (integer) or dictionary. 
+ - If this option is not specified or the number is C(0), then retries are disabled. + - A number sets the total number of retries, and uses collection defaults for the other settings. + - A dictionary value is used directly to initialize the C(Retry) class, so it can be used to fully customize retries. + - For detailed information on retries, see the collection User Guide. + type: raw + version_added: 1.3.0 + retry_action: + description: + - Controls whether and how to show messages on I(retries). + - This has no effect if a request is not retried. + type: str + choices: + - ignore + - warn + default: warn + version_added: 1.3.0 + ''' + + PLUGINS = r''' + options: + url: + env: + - name: ANSIBLE_HASHI_VAULT_ADDR + version_added: 0.2.0 + ini: + - section: hashi_vault_collection + key: url + version_added: 1.4.0 + vars: + - name: ansible_hashi_vault_url + version_added: 1.2.0 + - name: ansible_hashi_vault_addr + version_added: 1.2.0 + proxies: + env: + - name: ANSIBLE_HASHI_VAULT_PROXIES + ini: + - section: hashi_vault_collection + key: proxies + version_added: 1.4.0 + vars: + - name: ansible_hashi_vault_proxies + version_added: 1.2.0 + type: raw + version_added: 1.1.0 + ca_cert: + env: + - name: ANSIBLE_HASHI_VAULT_CA_CERT + version_added: 1.2.0 + ini: + - section: hashi_vault_collection + key: ca_cert + version_added: 1.4.0 + vars: + - name: ansible_hashi_vault_ca_cert + version_added: 1.2.0 + validate_certs: + vars: + - name: ansible_hashi_vault_validate_certs + version_added: 1.2.0 + namespace: + env: + - name: ANSIBLE_HASHI_VAULT_NAMESPACE + version_added: 0.2.0 + ini: + - section: hashi_vault_collection + key: namespace + version_added: 1.4.0 + vars: + - name: ansible_hashi_vault_namespace + version_added: 1.2.0 + timeout: + env: + - name: ANSIBLE_HASHI_VAULT_TIMEOUT + ini: + - section: hashi_vault_collection + key: timeout + version_added: 1.4.0 + vars: + - name: ansible_hashi_vault_timeout + retries: + env: + - name: ANSIBLE_HASHI_VAULT_RETRIES + ini: + - section: hashi_vault_collection + key: retries + version_added: 1.4.0 + vars: + - name: ansible_hashi_vault_retries + retry_action: + env: + - name: ANSIBLE_HASHI_VAULT_RETRY_ACTION + ini: + - section: hashi_vault_collection + key: retry_action + version_added: 1.4.0 + vars: + - name: ansible_hashi_vault_retry_action + ''' diff --git a/ansible_collections/community/hashi_vault/plugins/doc_fragments/engine_mount.py b/ansible_collections/community/hashi_vault/plugins/doc_fragments/engine_mount.py new file mode 100644 index 000000000..8187f28e3 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/doc_fragments/engine_mount.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r''' +options: + engine_mount_point: + description: The path where the secret backend is mounted. 
+ type: str +''' + + PLUGINS = r''' +options: + engine_mount_point: + vars: + - name: ansible_hashi_vault_engine_mount_point +''' diff --git a/ansible_collections/community/hashi_vault/plugins/doc_fragments/token_create.py b/ansible_collections/community/hashi_vault/plugins/doc_fragments/token_create.py new file mode 100644 index 000000000..30031f076 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/doc_fragments/token_create.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r''' +options: + orphan: + description: + - When C(true), uses the C(/create-orphan) API endpoint, which requires C(sudo) (but not C(root)) to create an orphan. + - With C(hvac>=1.0.0), requires collection version C(>=3.3.0). + type: bool + default: false + no_parent: + description: + - This option only has effect if used by a C(root) or C(sudo) caller and only when I(orphan=false). + - When C(true), the token created will not have a parent. + type: bool + no_default_policy: + description: + - If C(true) the default policy will not be contained in this token's policy set. + - If the token will be used with this collection, set I(token_validate=false). + type: bool + policies: + description: + - A list of policies for the token. This must be a subset of the policies belonging to the token making the request, unless root. + - If not specified, defaults to all the policies of the calling token. + type: list + elements: str + id: + description: + - The ID of the client token. Can only be specified by a root token. + - The ID provided may not contain a C(.) character. + - Otherwise, the token ID is a randomly generated value. + type: str + role_name: + description: + - The name of the token role. If used, the token will be created against the specified role name which may override options set during this call. + type: str + meta: + description: A dict of string to string valued metadata. This is passed through to the audit devices. + type: dict + renewable: + description: + - Set to C(false) to disable the ability of the token to be renewed past its initial TTL. + - Setting the value to C(true) will allow the token to be renewable up to the system/mount maximum TTL. + type: bool + ttl: + description: + - The TTL period of the token, provided as C(1h) for example, where hour is the largest suffix. + - If not provided, the token is valid for the default lease TTL, or indefinitely if the root policy is used. + type: str + type: + description: The token type. The default is determined by the role configuration specified by I(role_name). + type: str + choices: + - batch + - service + explicit_max_ttl: + description: + - If set, the token will have an explicit max TTL set upon it. + - This maximum token TTL cannot be changed later, + and unlike with normal tokens, updates to the system/mount max TTL value will have no effect at renewal time. + - The token will never be able to be renewed or used past the value set at issue time. + type: str + display_name: + description: The display name of the token. + type: str + num_uses: + description: + - The maximum uses for the given token. This can be used to create a one-time-token or limited use token. 
+ - The value of C(0) has no limit to the number of uses. + type: int + period: + description: + - If specified, the token will be periodic. + - It will have no maximum TTL (unless an I(explicit_max_ttl) is also set) but every renewal will use the given period. + - Requires a root token or one with the C(sudo) capability. + type: str + entity_alias: + description: + - Name of the entity alias to associate with during token creation. + - Only works in combination with I(role_name) option and used entity alias must be listed in C(allowed_entity_aliases). + - If this has been specified, the entity will not be inherited from the parent. + type: str +''' diff --git a/ansible_collections/community/hashi_vault/plugins/doc_fragments/wrapping.py b/ansible_collections/community/hashi_vault/plugins/doc_fragments/wrapping.py new file mode 100644 index 000000000..720f7bdfa --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/doc_fragments/wrapping.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r''' +options: + wrap_ttl: + description: Specifies response wrapping token creation with duration. For example C(15s), C(20m), C(25h). + type: str +''' + + PLUGINS = r''' +options: + wrap_ttl: + vars: + - name: ansible_hashi_vault_wrap_ttl +''' diff --git a/ansible_collections/community/hashi_vault/plugins/filter/vault_login_token.py b/ansible_collections/community/hashi_vault/plugins/filter/vault_login_token.py new file mode 100644 index 000000000..543d60c3f --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/filter/vault_login_token.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# (c) 2021, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible.errors import AnsibleError + + +def vault_login_token(login_response, optional_field='login'): + '''Extracts the token value from a Vault login response. + Meant to be used with the vault_login module and lookup plugin. + ''' + + try: + deref = login_response[optional_field] + except TypeError: + raise AnsibleError("The 'vault_login_token' filter expects a dictionary.") + except KeyError: + deref = login_response + + try: + token = deref['auth']['client_token'] + except KeyError: + raise AnsibleError("Could not find 'auth' or 'auth.client_token' fields. 
Input may not be a Vault login response.") + + return token + + +class FilterModule(object): + '''Ansible jinja2 filters''' + + def filters(self): + return { + 'vault_login_token': vault_login_token, + } diff --git a/ansible_collections/community/hashi_vault/plugins/filter/vault_login_token.yml b/ansible_collections/community/hashi_vault/plugins/filter/vault_login_token.yml new file mode 100644 index 000000000..e2946bafe --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/filter/vault_login_token.yml @@ -0,0 +1,98 @@ +# (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later +--- +DOCUMENTATION: + name: vault_login_token + short_description: Extracts the Vault token from a login or token creation + version_added: 2.2.0 + description: + - Extracts the token value from the structure returned by a Vault token creation operation. + seealso: + - module: community.hashi_vault.vault_login + - module: community.hashi_vault.vault_token_create + - plugin: community.hashi_vault.vault_login + plugin_type: lookup + - plugin: community.hashi_vault.vault_token_create + plugin_type: lookup + - ref: Filter Guide <ansible_collections.community.hashi_vault.docsite.filter_guide.vault_login_token> + description: The C(community.hashi_vault) Filter Guide + notes: + - >- + This filter is the same as reading into the I(_input) dictionary directly, + but it provides semantic meaning and automatically works with the differing output of the modules and lookups. + See the Filter guide for more information. + options: + _input: + description: + - A dictionary matching the structure returned by a login or token creation. + type: dict + required: true + optional_field: + description: + - >- + If this field exists in the input dictionary, then the value of that field is used as the I(_input) value. + - >- + The default value deals with the difference between the output of lookup plugins, + and does not need to be changed in most cases. + - See the examples or the Filter guide for more information. 
+ type: string + default: login + author: + - Brian Scholer (@briantist) + +EXAMPLES: | + - name: Set defaults + vars: + ansible_hashi_vault_url: https://vault:9801/ + ansible_hashi_vault_auth_method: userpass + ansible_hashi_vault_username: user + ansible_hashi_vault_password: "{{ lookup('env', 'MY_SECRET_PASSWORD') }}" + module_defaults: + community.hashi_vault.vault_login: + url: '{{ ansible_hashi_vault_url }}' + auth_method: '{{ ansible_hashi_vault_auth_method }}' + username: '{{ ansible_hashi_vault_username }}' + password: '{{ ansible_hashi_vault_password }}' + block: + - name: Perform a login with a lookup and display the token + vars: + login_response: "{{ lookup('community.hashi_vault.vault_login') }}" + debug: + msg: "The token is {{ login_response | community.hashi_vault.vault_login_token }}" + + - name: Perform a login with a module + community.hashi_vault.vault_login: + register: login_response + + - name: Display the token + debug: + msg: "The token is {{ login_response | community.hashi_vault.vault_login_token }}" + + - name: Use of optional_field + vars: + lookup_login_response: "{{ lookup('community.hashi_vault.vault_login') }}" + my_data: + something: somedata + vault_login: "{{ lookup_login_response }}" + + token_from_param: "{{ my_data | community.hashi_vault.vault_login_token(optional_field='vault_login') }}" + token_from_deref: "{{ my_data['vault_login'] | community.hashi_vault.vault_login_token }}" + # if the optional field doesn't exist, the dictionary itself is still checked + unused_optional: "{{ my_data['vault_login'] | community.hashi_vault.vault_login_token(optional_field='missing') }}" + block: + - name: Display the variables + ansible.builtin.debug: + var: '{{ item }}' + loop: + - my_data + - token_from_param + - token_from_deref + - unused_optional + +RETURN: + _value: + description: The token value. + returned: always + sample: s.nnrpog4i5gjizr6b8g1inwj3 + type: string diff --git a/ansible_collections/community/hashi_vault/plugins/lookup/hashi_vault.py b/ansible_collections/community/hashi_vault/plugins/lookup/hashi_vault.py new file mode 100644 index 000000000..1ea9b2c90 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/lookup/hashi_vault.py @@ -0,0 +1,349 @@ +# (c) 2020, Brian Scholer (@briantist) +# (c) 2015, Julie Davila (@juliedavila) <julie(at)davila.io> +# (c) 2017 Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + name: hashi_vault + author: + - Julie Davila (@juliedavila) <julie(at)davila.io> + - Brian Scholer (@briantist) + short_description: Retrieve secrets from HashiCorp's Vault + requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). + description: + - Retrieve secrets from HashiCorp's Vault. + - Consider R(migrating to other plugins in the collection,ansible_collections.community.hashi_vault.docsite.migration_hashi_vault_lookup). + seealso: + - ref: community.hashi_vault.hashi_vault Migration Guide <ansible_collections.community.hashi_vault.docsite.migration_hashi_vault_lookup> + description: Migrating from the C(hashi_vault) lookup. 
+ - ref: About the community.hashi_vault.hashi_vault lookup <ansible_collections.community.hashi_vault.docsite.about_hashi_vault_lookup> + description: The past, present, and future of the C(hashi_vault) lookup. + - ref: community.hashi_vault.vault_read lookup <ansible_collections.community.hashi_vault.vault_read_lookup> + description: The official documentation for the C(community.hashi_vault.vault_read) lookup plugin. + - module: community.hashi_vault.vault_read + - ref: community.hashi_vault.vault_kv2_get lookup <ansible_collections.community.hashi_vault.vault_kv2_get_lookup> + description: The official documentation for the C(community.hashi_vault.vault_kv2_get) lookup plugin. + - module: community.hashi_vault.vault_kv2_get + - ref: community.hashi_vault.vault_kv1_get lookup <ansible_collections.community.hashi_vault.vault_kv1_get_lookup> + description: The official documentation for the C(community.hashi_vault.vault_kv1_get) lookup plugin. + - module: community.hashi_vault.vault_kv1_get + - ref: community.hashi_vault Lookup Guide <ansible_collections.community.hashi_vault.docsite.lookup_guide> + description: Guidance on using lookups in C(community.hashi_vault). + notes: + - Due to a current limitation in the HVAC library there won't necessarily be an error if a bad endpoint is specified. + - As of community.hashi_vault 0.1.0, only the latest version of a secret is returned when specifying a KV v2 path. + - As of community.hashi_vault 0.1.0, all options can be supplied via term string (space delimited key=value pairs) or by parameters (see examples). + - As of community.hashi_vault 0.1.0, when I(secret) is the first option in the term string, C(secret=) is not required (see examples). + extends_documentation_fragment: + - community.hashi_vault.connection + - community.hashi_vault.connection.plugins + - community.hashi_vault.auth + - community.hashi_vault.auth.plugins + options: + secret: + description: Vault path to the secret being requested in the format C(path[:field]). + required: True + return_format: + description: + - Controls how multiple key/value pairs in a path are treated on return. + - C(dict) returns a single dict containing the key/value pairs. + - C(values) returns a list of all the values only. Use when you don't care about the keys. + - C(raw) returns the actual API result (deserialized), which includes metadata and may have the data nested in other keys. 
+ choices: + - dict + - values + - raw + default: dict + aliases: [ as ] +""" + +EXAMPLES = """ +- ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=secret/hello:value token=c975b780-d1be-8016-866b-01d0f9b688a5 url=http://myvault:8200') }}" + +- name: Return all secrets from a path + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=secret/hello token=c975b780-d1be-8016-866b-01d0f9b688a5 url=http://myvault:8200') }}" + +- name: Vault that requires authentication via LDAP + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/hello:value auth_method=ldap mount_point=ldap username=myuser password=mypas') }}" + +- name: Vault that requires authentication via username and password + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=secret/hola:val auth_method=userpass username=myuser password=psw url=http://vault:8200') }}" + +- name: Connect to Vault using TLS + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=secret/hola:value token=c975b780-d1be-8016-866b-01d0f9b688a5 validate_certs=False') }}" + +- name: using certificate auth + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/hi:val token=xxxx url=https://vault:8200 validate_certs=True cacert=/cacert/path/ca.pem') }}" + +- name: Authenticate with a Vault app role + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=secret/hello:value auth_method=approle role_id=myroleid secret_id=mysecretid') }}" + +- name: Return all secrets from a path in a namespace + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=secret/hello token=c975b780-d1be-8016-866b-01d0f9b688a5 namespace=teama/admins') }}" + +# When using KV v2 the PATH should include "data" between the secret engine mount and path (e.g. "secret/data/:path") +# see: https://www.vaultproject.io/api/secret/kv/kv-v2.html#read-secret-version +- name: Return latest KV v2 secret from path + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=secret/data/hello token=my_vault_token url=http://myvault_url:8200') }}" + +# The following examples show more modern syntax, with parameters specified separately from the term string. 
+ +- name: secret= is not required if secret is first + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/data/hello token=<token> url=http://myvault_url:8200') }}" + +- name: options can be specified as parameters rather than put in term string + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/data/hello', token=my_token_var, url='http://myvault_url:8200') }}" + +# return_format (or its alias 'as') can control how secrets are returned to you +- name: return secrets as a dict (default) + ansible.builtin.set_fact: + my_secrets: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/data/manysecrets', token=my_token_var, url='http://myvault_url:8200') }}" +- ansible.builtin.debug: + msg: "{{ my_secrets['secret_key'] }}" +- ansible.builtin.debug: + msg: "Secret '{{ item.key }}' has value '{{ item.value }}'" + loop: "{{ my_secrets | dict2items }}" + +- name: return secrets as values only + ansible.builtin.debug: + msg: "A secret value: {{ item }}" + loop: "{{ query('community.hashi_vault.hashi_vault', 'secret/data/manysecrets', token=my_token_var, url='http://vault_url:8200', return_format='values') }}" + +- name: return raw secret from API, including metadata + ansible.builtin.set_fact: + my_secret: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/data/hello:value', token=my_token_var, url='http://myvault_url:8200', as='raw') }}" +- ansible.builtin.debug: + msg: "This is version {{ my_secret['data']['metadata']['version'] }} of hello:value. The secret data is {{ my_secret['data']['data']['value'] }}" + +# AWS IAM authentication method +# uses Ansible standard AWS options + +- name: authenticate with aws_iam + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/hello:value', auth_method='aws_iam', role_id='myroleid', profile=my_boto_profile) }}" + +# JWT auth + +- name: Authenticate with a JWT + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/hola:val', auth_method='jwt', role_id='myroleid', jwt='myjwt', url='https://vault:8200') }}" + +# Disabling Token Validation +# Use this when your token does not have the lookup-self capability. Usually this is applied to all tokens via the default policy. +# However you can choose to create tokens without applying the default policy, or you can modify your default policy not to include it. +# When disabled, your invalid or expired token will be indistinguishable from insufficient permissions. + +- name: authenticate without token validation + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/hello:value', token=my_token, token_validate=False) }}" + +# "none" auth method does no authentication and does not send a token to the Vault address. +# One example of where this could be used is with a Vault agent where the agent will handle authentication to Vault. +# https://www.vaultproject.io/docs/agent + +- name: authenticate with vault agent + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/hello:value', auth_method='none', url='http://127.0.0.1:8100') }}" + +# Use a proxy + +- name: use a proxy with login/password + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=... token=... url=https://...
proxies=https://user:pass@myproxy:8080') }}" + +- name: 'use a socks proxy (needs some additional dependencies, see: https://requests.readthedocs.io/en/master/user/advanced/#socks )' + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=... token=... url=https://... proxies=socks5://myproxy:1080') }}" + +- name: use proxies with a dict (as param) + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', '...', proxies={'http': 'http://myproxy1', 'https': 'http://myproxy2'}) }}" + +- name: use proxies with a dict (as param, pre-defined var) + vars: + prox: + http: http://myproxy1 + https: https://myproxy2 + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', '...', proxies=prox) }}" + +- name: use proxies with a dict (as direct ansible var) + vars: + ansible_hashi_vault_proxies: + http: http://myproxy1 + https: https://myproxy2 + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', '...') }}" + +- name: use proxies with a dict (in the term string, JSON syntax) + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', '... proxies={\"http\":\"http://myproxy1\",\"https\":\"http://myproxy2\"}') }}" + +- name: use ansible vars to supply some options + vars: + ansible_hashi_vault_url: 'https://myvault:8282' + ansible_hashi_vault_auth_method: token + set_fact: + secret1: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/data/secret1') }}" + secret2: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/data/secret2') }}" + +- name: use a custom timeout + debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/data/secret1', timeout=120) }}" + +- name: use a custom timeout and retry on failure 3 times (with collection retry defaults) + vars: + ansible_hashi_vault_timeout: 5 + ansible_hashi_vault_retries: 3 + debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/data/secret1') }}" + +- name: retry on failure (with custom retry settings and no warnings) + vars: + ansible_hashi_vault_retries: + total: 6 + backoff_factor: 0.9 + status_forcelist: [500, 502] + allowed_methods: + - GET + - PUT + debug: + msg: "{{ lookup('community.hashi_vault.hashi_vault', 'secret/data/secret1', retry_action='ignore') }}" +""" + +RETURN = """ +_raw: + description: + - secret(s) requested + type: list + elements: dict +""" + +from ansible.errors import AnsibleError +from ansible.utils.display import Display + +from ansible_collections.community.hashi_vault.plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + +display = Display() + +HAS_HVAC = False +try: + import hvac + HAS_HVAC = True +except ImportError: + HAS_HVAC = False + + +class LookupModule(HashiVaultLookupBase): + def run(self, terms, variables=None, **kwargs): + if not HAS_HVAC: + raise AnsibleError("Please pip install hvac to use the hashi_vault lookup module.") + + ret = [] + + for term in terms: + opts = kwargs.copy() + opts.update(self.parse_kev_term(term, first_unqualified='secret', plugin_name='hashi_vault')) + self.set_options(direct=opts, var_options=variables) + # TODO: remove process_deprecations() if backported fix is available (see method definition) + self.process_deprecations() + self.process_options() + + client_args = self.connection_options.get_hvac_connection_options() + self.client = self.helper.get_vault_client(**client_args) + + try: +
self.authenticator.authenticate(self.client) + except (NotImplementedError, HashiVaultValueError) as e: + raise AnsibleError(e) + + ret.extend(self.get()) + + return ret + + def process_options(self): + '''performs deep validation and value loading for options''' + + # process connection options + self.connection_options.process_connection_options() + + try: + self.authenticator.validate() + except (NotImplementedError, HashiVaultValueError) as e: + raise AnsibleError(e) + + # secret field splitter + self.field_ops() + + # begin options processing methods + + def field_ops(self): + # split secret and field + secret = self.get_option('secret') + + s_f = secret.rsplit(':', 1) + self.set_option('secret', s_f[0]) + if len(s_f) >= 2: + field = s_f[1] + else: + field = None + self.set_option('secret_field', field) + + def get(self): + '''gets a secret. should always return a list''' + + secret = self.get_option('secret') + field = self.get_option('secret_field') + return_as = self.get_option('return_format') + + try: + data = self.client.read(secret) + except hvac.exceptions.Forbidden: + raise AnsibleError("Forbidden: Permission Denied to secret '%s'." % secret) + + if data is None: + raise AnsibleError("The secret '%s' doesn't seem to exist." % secret) + + if return_as == 'raw': + return [data] + + # Check response for KV v2 fields and flatten nested secret data. + # https://vaultproject.io/api/secret/kv/kv-v2.html#sample-response-1 + try: + # sentinel field checks + check_dd = data['data']['data'] + check_md = data['data']['metadata'] + # unwrap nested data + data = data['data'] + except KeyError: + pass + + if return_as == 'values': + return list(data['data'].values()) + + # everything after here implements return_as == 'dict' + if not field: + return [data['data']] + + if field not in data['data']: + raise AnsibleError("The secret %s does not contain the field '%s'. for hashi_vault lookup" % (secret, field)) + + return [data['data'][field]] diff --git a/ansible_collections/community/hashi_vault/plugins/lookup/vault_ansible_settings.py b/ansible_collections/community/hashi_vault/plugins/lookup/vault_ansible_settings.py new file mode 100644 index 000000000..f3103a959 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/lookup/vault_ansible_settings.py @@ -0,0 +1,337 @@ +# (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = r''' +name: vault_ansible_settings +version_added: 2.5.0 +author: + - Brian Scholer (@briantist) +short_description: Returns plugin settings (options) +description: + - Returns a dictionary of options and their values for a given plugin. + - This is most useful for using plugin settings in modules and C(module_defaults), + especially when common settings are set in C(ansible.cfg), in Ansible vars, or via environment variables on the controller. + - Options can be filtered by name, and can include or exclude defaults, unset options, and private options. +seealso: + - ref: Module defaults <module_defaults> + description: Using the C(module_defaults) keyword. +notes: + - This collection supports some "low precedence" environment variables that get loaded after all other sources, such as C(VAULT_ADDR). + - These environment variables B(are not supported) with this plugin. 
+ - If you wish to use them, use the R(ansible.builtin.env lookup,ansible_collections.ansible.builtin.env_lookup) to + load them directly when calling a module or setting C(module_defaults). + - Similarly, any options that rely on additional processing to fill in their values will not have that done. + - For example, tokens will not be loaded from the token sink file, auth methods will not have their C(validate) methods called. + - See the examples for workarounds, but consider using Ansible-specific ways of setting these values instead. +options: + _terms: + description: + - The names of the options to load. + - Supports C(fnmatch) L(style wildcards,https://docs.python.org/3/library/fnmatch.html). + - Prepend any name or pattern with C(!) to invert the match. + type: list + elements: str + required: false + default: ['*'] + plugin: + description: + - The name of the plugin whose options will be returned. + - Only lookups are supported. + - Short names (without a dot C(.)) will be fully qualified with C(community.hashi_vault). + type: str + default: community.hashi_vault.vault_login + include_private: + description: Include options that begin with underscore C(_). + type: bool + default: false + include_none: + description: Include options whose value is C(None) (this usually means they are unset). + type: bool + default: false + include_default: + description: Include options whose value comes from a default. + type: bool + default: false +''' + +EXAMPLES = r''' +### In these examples, we assume an ansible.cfg like this: +# [hashi_vault_collection] +# url = https://config-based-vault.example.com +# retries = 5 +### end ansible.cfg + +### We assume some environment variables set as well +# ANSIBLE_HASHI_VAULT_URL: https://env-based-vault.example.com +# ANSIBLE_HASHI_VAULT_TOKEN: s.123456789 +### end environment variables + +# playbook - ansible-core 2.12 and higher +## set defaults for the collection group +- hosts: all + vars: + ansible_hashi_vault_auth_method: token + module_defaults: + group/community.hashi_vault.vault: "{{ lookup('community.hashi_vault.vault_ansible_settings') }}" + tasks: + - name: Get a secret from the remote host with settings from the controller + community.hashi_vault.vault_kv2_get: + path: app/some/secret +###### + +# playbook - ansible any version +## set defaults for a specific module +- hosts: all + vars: + ansible_hashi_vault_auth_method: token + module_defaults: + community.hashi_vault.vault_kv2_get: "{{ lookup('community.hashi_vault.vault_ansible_settings') }}" + tasks: + - name: Get a secret from the remote host with settings from the controller + community.hashi_vault.vault_kv2_get: + path: app/some/secret +###### + +# playbook - ansible any version +## set defaults for several modules +## do not use controller's auth +- hosts: all + vars: + ansible_hashi_vault_auth_method: aws_iam + settings: "{{ lookup('community.hashi_vault.vault_ansible_settings', '*', '!*token*') }}" + module_defaults: + community.hashi_vault.vault_kv2_get: '{{ settings }}' + community.hashi_vault.vault_kv1_get: '{{ settings }}' + tasks: + - name: Get a secret from the remote host with some settings from the controller, auth from remote + community.hashi_vault.vault_kv2_get: + path: app/some/secret + + - name: Same with kv1 + community.hashi_vault.vault_kv1_get: + path: app/some/secret +###### + +# playbook - ansible any version +## set defaults for several modules +## do not use controller's auth +## override returned settings +- hosts: all + vars: + ansible_hashi_vault_auth_method: 
userpass + plugin_settings: "{{ lookup('community.hashi_vault.vault_ansible_settings', '*', '!*token*') }}" + overrides: + auth_method: aws_iam + retries: '{{ (plugin_settings.retries | int) + 2 }}' + settings: >- + {{ + plugin_settings + | combine(overrides) + }} + module_defaults: + community.hashi_vault.vault_kv2_get: '{{ settings }}' + community.hashi_vault.vault_kv1_get: '{{ settings }}' + tasks: + - name: Get a secret from the remote host with some settings from the controller, auth from remote + community.hashi_vault.vault_kv2_get: + path: app/some/secret + + - name: Same with kv1 + community.hashi_vault.vault_kv1_get: + path: app/some/secret +###### + +# using a block is similar +- name: Settings + vars: + ansible_hashi_vault_auth_method: aws_iam + settings: "{{ lookup('community.hashi_vault.vault_ansible_settings', '*', '!*token*') }}" + module_defaults: + community.hashi_vault.vault_kv2_get: '{{ settings }}' + community.hashi_vault.vault_kv1_get: '{{ settings }}' + block: + - name: Get a secret from the remote host with some settings from the controller, auth from remote + community.hashi_vault.vault_kv2_get: + path: app/some/secret + + - name: Same with kv1 + community.hashi_vault.vault_kv1_get: + path: app/some/secret +##### + +# use settings from a different plugin +## when you need settings that are not in the default plugin (vault_login) +- name: Settings + vars: + ansible_hashi_vault_engine_mount_point: dept-secrets + settings: "{{ lookup('community.hashi_vault.vault_ansible_settings', plugin='community.hashi_vault.vault_kv2_get') }}" + module_defaults: + community.hashi_vault.vault_kv2_get: '{{ settings }}' + block: + - name: Get a secret from the remote host with some settings from the controller, auth from remote + community.hashi_vault.vault_kv2_get: + path: app/some/secret +##### + +# use settings from a different plugin (on an individual call) +## short names assume community.hashi_vault +- name: Settings + vars: + ansible_hashi_vault_engine_mount_point: dept-secrets + settings: "{{ lookup('community.hashi_vault.vault_ansible_settings') }}" + module_defaults: + community.hashi_vault.vault_kv2_get: '{{ settings }}' + block: + - name: Get a secret from the remote host with some settings from the controller, auth from remote + community.hashi_vault.vault_kv2_get: + engine_mount_point: "{{ lookup('community.hashi_vault.vault_ansible_settings', plugin='vault_kv2_get') }}" + path: app/some/secret +##### + +# normally, options with default values are not returned, but can be +- name: Settings + vars: + settings: "{{ lookup('community.hashi_vault.vault_ansible_settings') }}" + module_defaults: + # we usually want to use the remote host's IAM auth + community.hashi_vault.vault_kv2_get: >- + {{ + settings + | combine({'auth_method': 'aws_iam'}) + }} + block: + - name: Use the plugin auth method instead, even if it is the default method + community.hashi_vault.vault_kv2_get: + auth_method: "{{ lookup('community.hashi_vault.vault_ansible_settings', 'auth_method', include_default=True) }}" + path: app/some/secret +##### + +# normally, options with None/null values are not returned, +# nor are private options (names begin with underscore _), +# but they can be returned too if desired +- name: Show all plugin settings + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.vault_ansible_settings', include_none=True, include_private=True, include_default=True) }}" +##### + +# dealing with low-precedence env vars and token sink loading +## here, VAULT_ADDR is usually used with
plugins, but that will not work with vault_ansible_settings. +## additionally, the CLI `vault login` is used before running Ansible, so the token sink is usually used, which also will not work. +- hosts: all + vars: + plugin_settings: "{{ lookup('community.hashi_vault.vault_ansible_settings', 'url', 'token*', include_default=True) }}" + overrides: + url: "{{ plugin_settings.url | default(lookup('ansible.builtin.env', 'VAULT_ADDR')) }}" + token: >- + {{ + plugin_settings.token + | default( + lookup( + 'ansible.builtin.file', + ( + plugin_settings.token_path | default(lookup('ansible.builtin.env', 'HOME')), + plugin_settings.token_file + ) | path_join + ) + ) + }} + auth_method: token + settings: >- + {{ + plugin_settings + | combine(overrides) + }} + module_defaults: + community.hashi_vault.vault_kv2_get: "{{ lookup('community.hashi_vault.vault_ansible_settings') }}" + tasks: + - name: Get a secret from the remote host with settings from the controller + community.hashi_vault.vault_kv2_get: + path: app/some/secret +##### +''' + +RETURN = r''' +_raw: + description: + - A dictionary of the options and their values. + - Only a single dictionary will be returned, even with multiple terms. + type: dict + sample: + retries: 5 + timeout: 20 + token: s.jRHAoqElnJDx6J5ExYelCDYR + url: https://vault.example.com +''' + +from fnmatch import fnmatchcase + +from ansible.errors import AnsibleError +from ansible.plugins.lookup import LookupBase +from ansible import constants as C +from ansible.plugins.loader import lookup_loader +from ansible.utils.display import Display + + +display = Display() + + +class LookupModule(LookupBase): + def run(self, terms, variables=None, **kwargs): + self.set_options(direct=kwargs, var_options=variables) + + include_private = self.get_option('include_private') + include_none = self.get_option('include_none') + include_default = self.get_option('include_default') + + plugin = self.get_option('plugin') + if '.' not in plugin: + plugin = 'community.hashi_vault.' + plugin + + if not terms: + terms = ['*'] + + opts = {} + + try: + # ansible-core 2.10 or later + p = lookup_loader.find_plugin_with_context(plugin) + loadname = p.plugin_resolved_name + resolved = p.resolved + except AttributeError: + # ansible 2.9 + p = lookup_loader.find_plugin_with_name(plugin) + loadname = p[0] + resolved = loadname is not None + + if not resolved: + raise AnsibleError("'%s' plugin not found." 
% plugin) + + # Loading ensures that the options are initialized in ConfigManager + lookup_loader.get(plugin, class_only=True) + + pluginget = C.config.get_configuration_definitions('lookup', loadname) + + for option in pluginget.keys(): + if not include_private and option.startswith('_'): + continue + + keep = False + for pattern in terms: + if pattern.startswith('!'): + if keep and fnmatchcase(option, pattern[1:]): + keep = False + else: + keep = keep or fnmatchcase(option, pattern) + + if not keep: + continue + + value, origin = C.config.get_config_value_and_origin(option, None, 'lookup', loadname, None, variables=variables) + if (include_none or value is not None) and (include_default or origin != 'default'): + opts[option] = value + + return [opts] diff --git a/ansible_collections/community/hashi_vault/plugins/lookup/vault_kv1_get.py b/ansible_collections/community/hashi_vault/plugins/lookup/vault_kv1_get.py new file mode 100644 index 000000000..053150a80 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/lookup/vault_kv1_get.py @@ -0,0 +1,220 @@ +# (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = r''' +name: vault_kv1_get +version_added: 2.5.0 +author: + - Brian Scholer (@briantist) +short_description: Get a secret from HashiCorp Vault's KV version 1 secret store +requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). +description: + - Gets a secret from HashiCorp Vault's KV version 1 secret store. +seealso: + - module: community.hashi_vault.vault_kv1_get + - ref: community.hashi_vault.vault_kv2_get lookup <ansible_collections.community.hashi_vault.vault_kv2_get_lookup> + description: The official documentation for the C(community.hashi_vault.vault_kv2_get) lookup plugin. + - module: community.hashi_vault.vault_kv2_get + - ref: community.hashi_vault Lookup Guide <ansible_collections.community.hashi_vault.docsite.lookup_guide> + description: Guidance on using lookups in C(community.hashi_vault). + - name: KV1 Secrets Engine + description: Documentation for the Vault KV secrets engine, version 1. + link: https://www.vaultproject.io/docs/secrets/kv/kv-v1 +extends_documentation_fragment: + - community.hashi_vault.connection + - community.hashi_vault.connection.plugins + - community.hashi_vault.auth + - community.hashi_vault.auth.plugins + - community.hashi_vault.engine_mount + - community.hashi_vault.engine_mount.plugins +options: + _terms: + description: + - Vault KV path(s) to be read. + - These are relative to the I(engine_mount_point), so the mount path should not be included. 
+ type: str + required: True + engine_mount_point: + default: kv +''' + +EXAMPLES = r''' +- name: Read a kv1 secret with the default mount point + ansible.builtin.set_fact: + response: "{{ lookup('community.hashi_vault.vault_kv1_get', 'hello', url='https://vault:8201') }}" + # equivalent API path is kv/hello + +- name: Display the results + ansible.builtin.debug: + msg: + - "Secret: {{ response.secret }}" + - "Data: {{ response.data }} (same as secret in kv1)" + - "Metadata: {{ response.metadata }} (response info in kv1)" + - "Full response: {{ response.raw }}" + - "Value of key 'password' in the secret: {{ response.secret.password }}" + +- name: Read a kv1 secret with a different mount point + ansible.builtin.set_fact: + response: "{{ lookup('community.hashi_vault.vault_kv1_get', 'hello', engine_mount_point='custom/kv1/mount', url='https://vault:8201') }}" + # equivalent API path is custom/kv1/mount/hello + +- name: Display the results + ansible.builtin.debug: + msg: + - "Secret: {{ response.secret }}" + - "Data: {{ response.data }} (same as secret in kv1)" + - "Metadata: {{ response.metadata }} (response info in kv1)" + - "Full response: {{ response.raw }}" + - "Value of key 'password' in the secret: {{ response.secret.password }}" + +- name: Perform multiple kv1 reads with a single Vault login, showing the secrets + vars: + paths: + - hello + - my-secret/one + - my-secret/two + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.vault_kv1_get', *paths, auth_method='userpass', username=user, password=pwd)['secret'] }}" + +- name: Perform multiple kv1 reads with a single Vault login in a loop + vars: + paths: + - hello + - my-secret/one + - my-secret/two + ansible.builtin.debug: + msg: '{{ item }}' + loop: "{{ query('community.hashi_vault.vault_kv1_get', *paths, auth_method='userpass', username=user, password=pwd) }}" + +- name: Perform multiple kv1 reads with a single Vault login in a loop (via with_), display values only + vars: + ansible_hashi_vault_auth_method: userpass + ansible_hashi_vault_username: '{{ user }}' + ansible_hashi_vault_password: '{{ pwd }}' + ansible.builtin.debug: + msg: '{{ item.values() | list }}' + with_community.hashi_vault.vault_kv1_get: + - hello + - my-secret/one + - my-secret/two +''' + +RETURN = r''' +_raw: + description: + - The result of the read(s) against the given path(s). + type: list + elements: dict + contains: + raw: + description: The raw result of the read against the given path. + returned: success + type: dict + sample: + auth: null + data: + Key1: value1 + Key2: value2 + lease_duration: 2764800 + lease_id: "" + renewable: false + request_id: e99f145f-f02a-7073-1229-e3f191057a70 + warnings: null + wrap_info: null + data: + description: The C(data) field of raw result. This can also be accessed via C(raw.data). + returned: success + type: dict + sample: + Key1: value1 + Key2: value2 + secret: + description: The C(data) field of the raw result. This is identical to C(data) in the return values. + returned: success + type: dict + sample: + Key1: value1 + Key2: value2 + metadata: + description: This is a synthetic result. It is the same as C(raw) with C(data) removed. 
+ returned: success + type: dict + sample: + auth: null + lease_duration: 2764800 + lease_id: "" + renewable: false + request_id: e99f145f-f02a-7073-1229-e3f191057a70 + warnings: null + wrap_info: null +''' + +from ansible.errors import AnsibleError +from ansible.utils.display import Display + +from ansible.module_utils.six import raise_from + +from ansible_collections.community.hashi_vault.plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + +display = Display() + +try: + import hvac +except ImportError as imp_exc: + HVAC_IMPORT_ERROR = imp_exc +else: + HVAC_IMPORT_ERROR = None + + +class LookupModule(HashiVaultLookupBase): + def run(self, terms, variables=None, **kwargs): + if HVAC_IMPORT_ERROR: + raise_from( + AnsibleError("This plugin requires the 'hvac' Python library"), + HVAC_IMPORT_ERROR + ) + + ret = [] + + self.set_options(direct=kwargs, var_options=variables) + # TODO: remove process_deprecations() if backported fix is available (see method definition) + self.process_deprecations() + + self.connection_options.process_connection_options() + client_args = self.connection_options.get_hvac_connection_options() + client = self.helper.get_vault_client(**client_args) + + engine_mount_point = self._options_adapter.get_option('engine_mount_point') + + try: + self.authenticator.validate() + self.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + raise AnsibleError(e) + + for term in terms: + try: + raw = client.secrets.kv.v1.read_secret(path=term, mount_point=engine_mount_point) + except hvac.exceptions.Forbidden as e: + raise_from(AnsibleError("Forbidden: Permission Denied to path ['%s']." % term), e) + except hvac.exceptions.InvalidPath as e: + if 'Invalid path for a versioned K/V secrets engine' in str(e): + msg = "Invalid path for a versioned K/V secrets engine ['%s']. If this is a KV version 2 path, use community.hashi_vault.vault_kv2_get." + else: + msg = "Invalid or missing path ['%s']." + + raise_from(AnsibleError(msg % (term,)), e) + + metadata = raw.copy() + data = metadata.pop('data') + + ret.append(dict(raw=raw, data=data, secret=data, metadata=metadata)) + + return ret diff --git a/ansible_collections/community/hashi_vault/plugins/lookup/vault_kv2_get.py b/ansible_collections/community/hashi_vault/plugins/lookup/vault_kv2_get.py new file mode 100644 index 000000000..bbd54ef8d --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/lookup/vault_kv2_get.py @@ -0,0 +1,233 @@ +# (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = r''' +name: vault_kv2_get +version_added: 2.5.0 +author: + - Brian Scholer (@briantist) +short_description: Get a secret from HashiCorp Vault's KV version 2 secret store +requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). +description: + - Gets a secret from HashiCorp Vault's KV version 2 secret store. 
+seealso: + - module: community.hashi_vault.vault_kv2_get + - ref: community.hashi_vault.vault_kv1_get lookup <ansible_collections.community.hashi_vault.vault_kv1_get_lookup> + description: The official documentation for the C(community.hashi_vault.vault_kv1_get) lookup plugin. + - module: community.hashi_vault.vault_kv1_get + - ref: community.hashi_vault Lookup Guide <ansible_collections.community.hashi_vault.docsite.lookup_guide> + description: Guidance on using lookups in C(community.hashi_vault). + - name: KV2 Secrets Engine + description: Documentation for the Vault KV secrets engine, version 2. + link: https://www.vaultproject.io/docs/secrets/kv/kv-v2 +extends_documentation_fragment: + - community.hashi_vault.connection + - community.hashi_vault.connection.plugins + - community.hashi_vault.auth + - community.hashi_vault.auth.plugins + - community.hashi_vault.engine_mount + - community.hashi_vault.engine_mount.plugins +options: + _terms: + description: + - Vault KV path(s) to be read. + - These are relative to the I(engine_mount_point), so the mount path should not be included. + type: str + required: True + engine_mount_point: + default: secret + version: + description: Specifies the version to return. If not set the latest version is returned. + type: int +''' + +EXAMPLES = r''' +- name: Read a kv2 secret with the default mount point + ansible.builtin.set_fact: + response: "{{ lookup('community.hashi_vault.vault_kv2_get', 'hello', url='https://vault:8201') }}" + # equivalent API path in 3.x.x is kv/data/hello + # equivalent API path in 4.0.0+ is secret/data/hello + +- name: Display the results + ansible.builtin.debug: + msg: + - "Secret: {{ response.secret }}" + - "Data: {{ response.data }} (contains secret data & metadata in kv2)" + - "Metadata: {{ response.metadata }}" + - "Full response: {{ response.raw }}" + - "Value of key 'password' in the secret: {{ response.secret.password }}" + +- name: Read version 5 of a kv2 secret with a different mount point + ansible.builtin.set_fact: + response: "{{ lookup('community.hashi_vault.vault_kv2_get', 'hello', version=5, engine_mount_point='custom/kv2/mount', url='https://vault:8201') }}" + # equivalent API path is custom/kv2/mount/data/hello + +- name: Assert that the version returned is as expected + ansible.builtin.assert: + that: + - response.metadata.version == 5 + +- name: Perform multiple kv2 reads with a single Vault login, showing the secrets + vars: + paths: + - hello + - my-secret/one + - my-secret/two + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.vault_kv2_get', *paths, auth_method='userpass', username=user, password=pwd)['secret'] }}" + +- name: Perform multiple kv2 reads with a single Vault login in a loop + vars: + paths: + - hello + - my-secret/one + - my-secret/two + ansible.builtin.debug: + msg: '{{ item }}' + loop: "{{ query('community.hashi_vault.vault_kv2_get', *paths, auth_method='userpass', username=user, password=pwd) }}" + +- name: Perform multiple kv2 reads with a single Vault login in a loop (via with_), display values only + vars: + ansible_hashi_vault_auth_method: userpass + ansible_hashi_vault_username: '{{ user }}' + ansible_hashi_vault_password: '{{ pwd }}' + ansible_hashi_vault_engine_mount_point: special/kv2 + ansible.builtin.debug: + msg: '{{ item.values() | list }}' + with_community.hashi_vault.vault_kv2_get: + - hello + - my-secret/one + - my-secret/two +''' + +RETURN = r''' +_raw: + description: + - The result of the read(s) against the given path(s). 
+ type: list + elements: dict + contains: + raw: + description: The raw result of the read against the given path. + returned: success + type: dict + sample: + auth: null + data: + data: + Key1: value1 + Key2: value2 + metadata: + created_time: "2022-04-21T15:56:58.8525402Z" + custom_metadata: null + deletion_time: "" + destroyed: false + version: 2 + lease_duration: 0 + lease_id: "" + renewable: false + request_id: dc829675-9119-e831-ae74-35fc5d33d200 + warnings: null + wrap_info: null + data: + description: The C(data) field of raw result. This can also be accessed via C(raw.data). + returned: success + type: dict + sample: + data: + Key1: value1 + Key2: value2 + metadata: + created_time: "2022-04-21T15:56:58.8525402Z" + custom_metadata: null + deletion_time: "" + destroyed: false + version: 2 + secret: + description: The C(data) field within the C(data) field. Equivalent to C(raw.data.data). + returned: success + type: dict + sample: + Key1: value1 + Key2: value2 + metadata: + description: The C(metadata) field within the C(data) field. Equivalent to C(raw.data.metadata). + returned: success + type: dict + sample: + created_time: "2022-04-21T15:56:58.8525402Z" + custom_metadata: null + deletion_time: "" + destroyed: false + version: 2 +''' + +from ansible.errors import AnsibleError +from ansible.utils.display import Display + +from ansible.module_utils.six import raise_from + +from ansible_collections.community.hashi_vault.plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + +display = Display() + +try: + import hvac +except ImportError as imp_exc: + HVAC_IMPORT_ERROR = imp_exc +else: + HVAC_IMPORT_ERROR = None + + +class LookupModule(HashiVaultLookupBase): + def run(self, terms, variables=None, **kwargs): + if HVAC_IMPORT_ERROR: + raise_from( + AnsibleError("This plugin requires the 'hvac' Python library"), + HVAC_IMPORT_ERROR + ) + + ret = [] + + self.set_options(direct=kwargs, var_options=variables) + # TODO: remove process_deprecations() if backported fix is available (see method definition) + self.process_deprecations() + + self.connection_options.process_connection_options() + client_args = self.connection_options.get_hvac_connection_options() + client = self.helper.get_vault_client(**client_args) + + version = self._options_adapter.get_option_default('version') + engine_mount_point = self._options_adapter.get_option('engine_mount_point') + + try: + self.authenticator.validate() + self.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + raise AnsibleError(e) + + for term in terms: + try: + raw = client.secrets.kv.v2.read_secret_version(path=term, version=version, mount_point=engine_mount_point) + except hvac.exceptions.Forbidden as e: + raise_from(AnsibleError("Forbidden: Permission Denied to path ['%s']." % term), e) + except hvac.exceptions.InvalidPath as e: + raise_from( + AnsibleError("Invalid or missing path ['%s'] with secret version '%s'. Check the path or secret version." 
% (term, version or 'latest')), + e + ) + + data = raw['data'] + metadata = data['metadata'] + secret = data['data'] + + ret.append(dict(raw=raw, data=data, secret=secret, metadata=metadata)) + + return ret diff --git a/ansible_collections/community/hashi_vault/plugins/lookup/vault_list.py b/ansible_collections/community/hashi_vault/plugins/lookup/vault_list.py new file mode 100644 index 000000000..56521c792 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/lookup/vault_list.py @@ -0,0 +1,183 @@ +# (c) 2023, Tom Kivlin (@tomkivlin) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + name: vault_list + version_added: 4.1.0 + author: + - Tom Kivlin (@tomkivlin) + short_description: Perform a list operation against HashiCorp Vault + requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). + description: + - Performs a generic list operation against a given path in HashiCorp Vault. + seealso: + - module: community.hashi_vault.vault_list + extends_documentation_fragment: + - community.hashi_vault.connection + - community.hashi_vault.connection.plugins + - community.hashi_vault.auth + - community.hashi_vault.auth.plugins + options: + _terms: + description: Vault path(s) to be listed. + type: str + required: true +""" + +EXAMPLES = """ +- name: List all secrets at a path + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.vault_list', 'secret/metadata', url='https://vault:8201') }}" + # For kv2, the path needs to follow the pattern 'mount_point/metadata' or 'mount_point/metadata/path' to list all secrets in that path + +- name: List access policies + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.vault_list', 'sys/policies/acl', url='https://vault:8201') }}" + +- name: Perform multiple list operations with a single Vault login + vars: + paths: + - secret/metadata + - sys/policies/acl + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.vault_list', *paths, auth_method='userpass', username=user, password=pwd) }}" + +- name: Perform multiple list operations with a single Vault login in a loop + vars: + paths: + - secret/metadata + - sys/policies/acl + ansible.builtin.debug: + msg: '{{ item }}' + loop: "{{ query('community.hashi_vault.vault_list', *paths, auth_method='userpass', username=user, password=pwd) }}" + +- name: Perform list operations with a single Vault login in a loop (via with_) + vars: + ansible_hashi_vault_auth_method: userpass + ansible_hashi_vault_username: '{{ user }}' + ansible_hashi_vault_password: '{{ pwd }}' + ansible.builtin.debug: + msg: '{{ item }}' + with_community.hashi_vault.vault_list: + - secret/metadata + - sys/policies/acl + +- name: Create fact consisting of list of dictionaries each with secret name (e.g. username) and value of a key (e.g. 
'password') within that secret + ansible.builtin.set_fact: + credentials: >- + {{ + credentials + | default([]) + [ + { + 'username': item, + 'password': lookup('community.hashi_vault.vault_kv2_get', item, engine_mount_point='vpn-users').secret.password + } + ] + }} + loop: "{{ query('community.hashi_vault.vault_list', 'vpn-users/metadata')[0].data['keys'] }}" + no_log: true + +- ansible.builtin.debug: + msg: "{{ credentials }}" + +- name: Create the same as above without looping, and only 2 logins + vars: + secret_names: >- + {{ + query('community.hashi_vault.vault_list', 'vpn-users/metadata') + | map(attribute='data') + | map(attribute='keys') + | flatten + }} + secret_values: >- + {{ + lookup('community.hashi_vault.vault_kv2_get', *secret_names, engine_mount_point='vpn-users') + | map(attribute='secret') + | map(attribute='password') + | flatten + }} + credentials_dict: "{{ dict(secret_names | zip(secret_values)) }}" + ansible.builtin.set_fact: + credentials_dict: "{{ credentials_dict }}" + credentials_list: "{{ credentials_dict | dict2items(key_name='username', value_name='password') }}" + no_log: true + +- ansible.builtin.debug: + msg: + - "Dictionary: {{ credentials_dict }}" + - "List: {{ credentials_list }}" + +- name: List all userpass users and output the token policies for each user + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.vault_read', 'auth/userpass/users/' + item).data.token_policies }}" + loop: "{{ query('community.hashi_vault.vault_list', 'auth/userpass/users')[0].data['keys'] }}" +""" + +RETURN = """ +_raw: + description: + - The raw result of the read against the given path. + type: list + elements: dict +""" + +from ansible.errors import AnsibleError +from ansible.utils.display import Display + +from ansible.module_utils.six import raise_from + +from ansible_collections.community.hashi_vault.plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + +display = Display() + +try: + import hvac +except ImportError as imp_exc: + HVAC_IMPORT_ERROR = imp_exc +else: + HVAC_IMPORT_ERROR = None + + +class LookupModule(HashiVaultLookupBase): + def run(self, terms, variables=None, **kwargs): + if HVAC_IMPORT_ERROR: + raise_from( + AnsibleError("This plugin requires the 'hvac' Python library"), + HVAC_IMPORT_ERROR + ) + + ret = [] + + self.set_options(direct=kwargs, var_options=variables) + # TODO: remove process_deprecations() if backported fix is available (see method definition) + self.process_deprecations() + + self.connection_options.process_connection_options() + client_args = self.connection_options.get_hvac_connection_options() + client = self.helper.get_vault_client(**client_args) + + try: + self.authenticator.validate() + self.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + raise AnsibleError(e) + + for term in terms: + try: + data = client.list(term) + except hvac.exceptions.Forbidden: + raise AnsibleError("Forbidden: Permission Denied to path '%s'." % term) + + if data is None: + raise AnsibleError("The path '%s' doesn't seem to exist." 
% term) + + ret.append(data) + + return ret diff --git a/ansible_collections/community/hashi_vault/plugins/lookup/vault_login.py b/ansible_collections/community/hashi_vault/plugins/lookup/vault_login.py new file mode 100644 index 000000000..27d497965 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/lookup/vault_login.py @@ -0,0 +1,138 @@ +# (c) 2021, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + name: vault_login + version_added: 2.2.0 + author: + - Brian Scholer (@briantist) + short_description: Perform a login operation against HashiCorp Vault + requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). + description: + - Performs a login operation against a given path in HashiCorp Vault, returning the login response, including the token. + seealso: + - module: community.hashi_vault.vault_login + - ref: community.hashi_vault.vault_login_token filter <ansible_collections.community.hashi_vault.vault_login_token_filter> + description: The official documentation for the C(community.hashi_vault.vault_login_token) filter plugin. + notes: + - This lookup does not use the term string and will not work correctly in loops. Only a single response will be returned. + - "A login is a write operation (creating a token persisted to storage), so this module always reports C(changed=True), + except when used with C(token) auth, because no new token is created in that case. For the purposes of Ansible playbooks however, + it may be more useful to set C(changed_when=false) if you're doing idempotency checks against the target system." + - The C(none) auth method is not valid for this plugin because there is no response to return. + - "With C(token) auth, no actual login is performed. + Instead, the given token's additional information is returned in a structure that resembles what login responses look like." + - "The C(token) auth method will only return full information if I(token_validate=True). + If the token does not have the C(lookup-self) capability, this will fail. If I(token_validate=False), only the token value itself + will be returned in the structure." + extends_documentation_fragment: + - community.hashi_vault.connection + - community.hashi_vault.connection.plugins + - community.hashi_vault.auth + - community.hashi_vault.auth.plugins + options: + _terms: + description: This is unused and any terms supplied will be ignored. 
+ type: str + required: false + token_validate: + default: true +""" + +EXAMPLES = """ +- name: Set a fact with a lookup result + set_fact: + login_data: "{{ lookup('community.hashi_vault.vault_login', url='https://vault', auth_method='userpass', username=user, password=pwd) }}" + +- name: Retrieve an approle role ID (token via filter) + community.hashi_vault.vault_read: + url: https://vault:8201 + auth_method: token + token: '{{ login_data | community.hashi_vault.vault_login_token }}' + path: auth/approle/role/role-name/role-id + register: approle_id + +- name: Retrieve an approle role ID (token via direct dict access) + community.hashi_vault.vault_read: + url: https://vault:8201 + auth_method: token + token: '{{ login_data.auth.client_token }}' + path: auth/approle/role/role-name/role-id + register: approle_id +""" + +RETURN = """ +_raw: + description: + - The result of the login with the given auth method. + type: list + elements: dict + contains: + auth: + description: The C(auth) member of the login response. + returned: success + type: dict + contains: + client_token: + description: Contains the token provided by the login operation (or the input token when I(auth_method=token)). + returned: success + type: str + data: + description: The C(data) member of the login response. + returned: success, when available + type: dict +""" + +from ansible.errors import AnsibleError +from ansible.utils.display import Display + +from ansible.module_utils.six import raise_from + +from ...plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase +from ...plugins.module_utils._hashi_vault_common import HashiVaultValueError + +display = Display() + +try: + import hvac # pylint: disable=unused-import +except ImportError as imp_exc: + HVAC_IMPORT_ERROR = imp_exc +else: + HVAC_IMPORT_ERROR = None + + +class LookupModule(HashiVaultLookupBase): + def run(self, terms, variables=None, **kwargs): + if HVAC_IMPORT_ERROR: + raise_from( + AnsibleError("This plugin requires the 'hvac' Python library"), + HVAC_IMPORT_ERROR + ) + + self.set_options(direct=kwargs, var_options=variables) + # TODO: remove process_deprecations() if backported fix is available (see method definition) + self.process_deprecations() + + if self.get_option('auth_method') == 'none': + raise AnsibleError("The 'none' auth method is not valid for this lookup.") + + self.connection_options.process_connection_options() + client_args = self.connection_options.get_hvac_connection_options() + client = self.helper.get_vault_client(**client_args) + + if len(terms) != 0: + display.warning("Supplied term strings will be ignored. 
This lookup does not use term strings.") + + try: + self.authenticator.validate() + response = self.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + raise AnsibleError(e) + + return [response] diff --git a/ansible_collections/community/hashi_vault/plugins/lookup/vault_read.py b/ansible_collections/community/hashi_vault/plugins/lookup/vault_read.py new file mode 100644 index 000000000..794262ed6 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/lookup/vault_read.py @@ -0,0 +1,137 @@ +# (c) 2021, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + name: vault_read + version_added: 1.4.0 + author: + - Brian Scholer (@briantist) + short_description: Perform a read operation against HashiCorp Vault + requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). + description: + - Performs a generic read operation against a given path in HashiCorp Vault. + seealso: + - module: community.hashi_vault.vault_read + - ref: community.hashi_vault.hashi_vault lookup <ansible_collections.community.hashi_vault.hashi_vault_lookup> + description: The official documentation for the C(community.hashi_vault.hashi_vault) lookup plugin. + extends_documentation_fragment: + - community.hashi_vault.connection + - community.hashi_vault.connection.plugins + - community.hashi_vault.auth + - community.hashi_vault.auth.plugins + options: + _terms: + description: Vault path(s) to be read. + type: str + required: True +""" + +EXAMPLES = """ +- name: Read a kv2 secret + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.vault_read', 'secret/data/hello', url='https://vault:8201') }}" + +- name: Retrieve an approle role ID + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.vault_read', 'auth/approle/role/role-name/role-id', url='https://vault:8201') }}" + +- name: Perform multiple reads with a single Vault login + vars: + paths: + - secret/data/hello + - auth/approle/role/role-one/role-id + - auth/approle/role/role-two/role-id + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.vault_read', *paths, auth_method='userpass', username=user, password=pwd) }}" + +- name: Perform multiple reads with a single Vault login in a loop + vars: + paths: + - secret/data/hello + - auth/approle/role/role-one/role-id + - auth/approle/role/role-two/role-id + ansible.builtin.debug: + msg: '{{ item }}' + loop: "{{ query('community.hashi_vault.vault_read', *paths, auth_method='userpass', username=user, password=pwd) }}" + +- name: Perform multiple reads with a single Vault login in a loop (via with_) + vars: + ansible_hashi_vault_auth_method: userpass + ansible_hashi_vault_username: '{{ user }}' + ansible_hashi_vault_password: '{{ pwd }}' + ansible.builtin.debug: + msg: '{{ item }}' + with_community.hashi_vault.vault_read: + - secret/data/hello + - auth/approle/role/role-one/role-id + - auth/approle/role/role-two/role-id +""" + +RETURN = """ +_raw: + description: + - The raw result of the read against the given path. 
+ type: list + elements: dict +""" + +from ansible.errors import AnsibleError +from ansible.utils.display import Display + +from ansible.module_utils.six import raise_from + +from ansible_collections.community.hashi_vault.plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + +display = Display() + +try: + import hvac +except ImportError as imp_exc: + HVAC_IMPORT_ERROR = imp_exc +else: + HVAC_IMPORT_ERROR = None + + +class LookupModule(HashiVaultLookupBase): + def run(self, terms, variables=None, **kwargs): + if HVAC_IMPORT_ERROR: + raise_from( + AnsibleError("This plugin requires the 'hvac' Python library"), + HVAC_IMPORT_ERROR + ) + + ret = [] + + self.set_options(direct=kwargs, var_options=variables) + # TODO: remove process_deprecations() if backported fix is available (see method definition) + self.process_deprecations() + + self.connection_options.process_connection_options() + client_args = self.connection_options.get_hvac_connection_options() + client = self.helper.get_vault_client(**client_args) + + try: + self.authenticator.validate() + self.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + raise AnsibleError(e) + + for term in terms: + try: + data = client.read(term) + except hvac.exceptions.Forbidden: + raise AnsibleError("Forbidden: Permission Denied to path '%s'." % term) + + if data is None: + raise AnsibleError("The path '%s' doesn't seem to exist." % term) + + ret.append(data) + + return ret diff --git a/ansible_collections/community/hashi_vault/plugins/lookup/vault_token_create.py b/ansible_collections/community/hashi_vault/plugins/lookup/vault_token_create.py new file mode 100644 index 000000000..520288897 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/lookup/vault_token_create.py @@ -0,0 +1,195 @@ +# (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + name: vault_token_create + version_added: 2.3.0 + author: + - Brian Scholer (@briantist) + short_description: Create a HashiCorp Vault token + requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). + description: + - Creates a token in HashiCorp Vault, returning the response, including the token. + seealso: + - module: community.hashi_vault.vault_token_create + - ref: community.hashi_vault.vault_login lookup <ansible_collections.community.hashi_vault.vault_login_lookup> + description: The official documentation for the C(community.hashi_vault.vault_login) lookup plugin. + - module: community.hashi_vault.vault_login + - ref: community.hashi_vault.vault_login_token filter <ansible_collections.community.hashi_vault.vault_login_token_filter> + description: The official documentation for the C(community.hashi_vault.vault_login_token) filter plugin. + notes: + - Token creation is a write operation (creating a token persisted to storage), so this module always reports C(changed=True). 
+ - For the purposes of Ansible playbooks however, + it may be more useful to set I(changed_when=false) if you are doing idempotency checks against the target system. + - In check mode, this module will not create a token, and will instead return a basic structure with an empty token. + However, this may not be useful if the token is required for follow on tasks. + It may be better to use this module with I(check_mode=no) in order to have a valid token that can be used. + extends_documentation_fragment: + - community.hashi_vault.connection + - community.hashi_vault.connection.plugins + - community.hashi_vault.auth + - community.hashi_vault.auth.plugins + - community.hashi_vault.token_create + - community.hashi_vault.wrapping + - community.hashi_vault.wrapping.plugins + options: + _terms: + description: This is unused and any terms supplied will be ignored. + type: str + required: false +""" + +EXAMPLES = """ +- name: Login via userpass and create a child token + ansible.builtin.set_fact: + token_data: "{{ lookup('community.hashi_vault.vault_token_create', url='https://vault', auth_method='userpass', username=user, password=passwd) }}" + +- name: Retrieve an approle role ID using the child token (token via filter) + community.hashi_vault.vault_read: + url: https://vault:8201 + auth_method: token + token: '{{ token_data | community.hashi_vault.vault_login_token }}' + path: auth/approle/role/role-name/role-id + register: approle_id + +- name: Retrieve an approle role ID (token via direct dict access) + community.hashi_vault.vault_read: + url: https://vault:8201 + auth_method: token + token: '{{ token_data.auth.client_token }}' + path: auth/approle/role/role-name/role-id + register: approle_id + +# implicitly uses url & token auth with a token from the environment +- name: Create an orphaned token with a short TTL and display the full response + ansible.builtin.debug: + var: lookup('community.hashi_vault.vault_token_create', orphan=True, ttl='60s') +""" + +RETURN = """ +_raw: + description: The result of the token creation operation. + returned: success + type: dict + sample: + auth: + client_token: s.rlwajI2bblHAWU7uPqZhLru3 + data: null + contains: + auth: + description: The C(auth) member of the token response. + returned: success + type: dict + contains: + client_token: + description: Contains the newly created token. + returned: success + type: str + data: + description: The C(data) member of the token response. 
+ returned: success, when available + type: dict +""" + +from ansible.errors import AnsibleError +from ansible.utils.display import Display + +from ansible.module_utils.six import raise_from + +from ...plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase +from ...plugins.module_utils._hashi_vault_common import HashiVaultValueError + +display = Display() + +try: + import hvac # pylint: disable=unused-import +except ImportError as imp_exc: + HVAC_IMPORT_ERROR = imp_exc +else: + HVAC_IMPORT_ERROR = None + + +class LookupModule(HashiVaultLookupBase): + PASS_THRU_OPTION_NAMES = [ + 'no_parent', + 'no_default_policy', + 'policies', + 'id', + 'role_name', + 'meta', + 'renewable', + 'ttl', + 'type', + 'explicit_max_ttl', + 'display_name', + 'num_uses', + 'period', + 'entity_alias', + 'wrap_ttl', + ] + + ORPHAN_OPTION_TRANSLATION = { + 'id': 'token_id', + 'role_name': 'role', + 'type': 'token_type', + } + + def run(self, terms, variables=None, **kwargs): + if HVAC_IMPORT_ERROR: + raise_from( + AnsibleError("This plugin requires the 'hvac' Python library"), + HVAC_IMPORT_ERROR + ) + + self.set_options(direct=kwargs, var_options=variables) + # TODO: remove process_deprecations() if backported fix is available (see method definition) + self.process_deprecations() + + self.connection_options.process_connection_options() + client_args = self.connection_options.get_hvac_connection_options() + client = self.helper.get_vault_client(**client_args) + + if len(terms) != 0: + display.warning("Supplied term strings will be ignored. This lookup does not use term strings.") + + try: + self.authenticator.validate() + self.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + raise AnsibleError(e) + + pass_thru_options = self._options_adapter.get_filled_options(*self.PASS_THRU_OPTION_NAMES) + + orphan_options = pass_thru_options.copy() + + for key in pass_thru_options.keys(): + if key in self.ORPHAN_OPTION_TRANSLATION: + orphan_options[self.ORPHAN_OPTION_TRANSLATION[key]] = orphan_options.pop(key) + + response = None + + if self.get_option('orphan'): + try: + try: + # this method was added in hvac 1.0.0 + # See: https://github.com/hvac/hvac/pull/869 + response = client.auth.token.create_orphan(**orphan_options) + except AttributeError: + # this method was removed in hvac 1.0.0 + # See: https://github.com/hvac/hvac/issues/758 + response = client.create_token(orphan=True, **orphan_options) + except Exception as e: + raise AnsibleError(e) + else: + try: + response = client.auth.token.create(**pass_thru_options) + except Exception as e: + raise AnsibleError(e) + + return [response] diff --git a/ansible_collections/community/hashi_vault/plugins/lookup/vault_write.py b/ansible_collections/community/hashi_vault/plugins/lookup/vault_write.py new file mode 100644 index 000000000..6864c76fb --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/lookup/vault_write.py @@ -0,0 +1,192 @@ +# (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + name: vault_write + version_added: 2.4.0 + author: + - Brian Scholer (@briantist) + short_description: Perform a write operation against HashiCorp Vault + requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - 
For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). + description: + - Performs a generic write operation against a given path in HashiCorp Vault, returning any output. + seealso: + - module: community.hashi_vault.vault_write + - module: community.hashi_vault.vault_kv2_write + - ref: community.hashi_vault.vault_read lookup <ansible_collections.community.hashi_vault.vault_read_lookup> + description: The official documentation for the C(community.hashi_vault.vault_read) lookup plugin. + - module: community.hashi_vault.vault_read + - ref: community.hashi_vault Lookup Guide <ansible_collections.community.hashi_vault.docsite.lookup_guide> + description: Guidance on using lookups in C(community.hashi_vault). + notes: + - C(vault_write) is a generic plugin to do operations that do not yet have a dedicated plugin. Where a specific plugin exists, that should be used instead. + - In the vast majority of cases, it will be better to do writes as a task, with the M(community.hashi_vault.vault_write) module. + - The lookup can be used in cases where you need a value directly in templating, but there is risk of executing the write many times unintentionally. + - The lookup is best used for endpoints that directly manipulate the input data and return a value, while not changing state in Vault. + - See the R(Lookup Guide,ansible_collections.community.hashi_vault.docsite.lookup_guide) for more information. + extends_documentation_fragment: + - community.hashi_vault.connection + - community.hashi_vault.connection.plugins + - community.hashi_vault.auth + - community.hashi_vault.auth.plugins + - community.hashi_vault.wrapping + - community.hashi_vault.wrapping.plugins + options: + _terms: + description: Vault path(s) to be written to. + type: str + required: true + data: + description: A dictionary to be serialized to JSON and then sent as the request body. + type: dict + required: false + default: {} +""" + +EXAMPLES = """ +# These examples show some uses that might work well as a lookup. +# For most uses, the vault_write module should be used. + +- name: Retrieve and display random data + vars: + data: + format: hex + num_bytes: 64 + ansible.builtin.debug: + msg: "{{ lookup('community.hashi_vault.vault_write', 'sys/tools/random/' ~ num_bytes, data=data) }}" + +- name: Hash some data and display the hash + vars: + input: | + Lorem ipsum dolor sit amet, consectetur adipiscing elit. + Pellentesque posuere dui a ipsum dapibus, et placerat nibh bibendum. + data: + input: '{{ input | b64encode }}' + hash_algo: sha2-256 + ansible.builtin.debug: + msg: "The hash is {{ lookup('community.hashi_vault.vault_write', 'sys/tools/hash/' ~ hash_algo, data=data) }}" + + +# In this next example, the Ansible controller's token does not have permission to read the secrets we need. +# It does have permission to generate new secret IDs for an approle which has permission to read the secrets, +# however the approle is configured to: +# 1) allow a maximum of 1 use per secret ID +# 2) restrict the IPs allowed to use login using the approle to those of the remote hosts +# +# Normally, the fact that a new secret ID would be generated on every loop iteration would not be desirable, +# but here it's quite convenient. 
+ +- name: Retrieve secrets from the remote host with one-time-use approle creds + vars: + role_id: "{{ lookup('community.hashi_vault.vault_read', 'auth/approle/role/role-name/role-id') }}" + secret_id: "{{ lookup('community.hashi_vault.vault_write', 'auth/approle/role/role-name/secret-id') }}" + community.hashi_vault.vault_read: + auth_method: approle + role_id: '{{ role_id }}' + secret_id: '{{ secret_id }}' + path: '{{ item }}' + register: secret_data + loop: + - secret/data/secret1 + - secret/data/app/deploy-key + - secret/data/access-codes/self-destruct + + +# This time we have secret values on the controller, and we need to run a command on the remote host +# that is expecting a single-use token as input, so we need to use wrapping to send the data. + +- name: Run a command that needs wrapped secrets + vars: + secrets: + secret1: '{{ my_secret_1 }}' + secret2: '{{ second_secret }}' + wrapped: "{{ lookup('community.hashi_vault.vault_write', 'sys/wrapping/wrap', data=secrets) }}" + ansible.builtin.command: 'vault unwrap {{ wrapped }}' +""" + +RETURN = """ +_raw: + description: The raw result of the write against the given path. + type: list + elements: dict +""" + +from ansible.errors import AnsibleError +from ansible.utils.display import Display + +from ansible.module_utils.six import raise_from + +from ansible_collections.community.hashi_vault.plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + +display = Display() + +try: + import hvac +except ImportError as imp_exc: + HVAC_IMPORT_ERROR = imp_exc +else: + HVAC_IMPORT_ERROR = None + + +class LookupModule(HashiVaultLookupBase): + def run(self, terms, variables=None, **kwargs): + if HVAC_IMPORT_ERROR: + raise_from( + AnsibleError("This plugin requires the 'hvac' Python library"), + HVAC_IMPORT_ERROR + ) + + ret = [] + + self.set_options(direct=kwargs, var_options=variables) + # TODO: remove process_deprecations() if backported fix is available (see method definition) + self.process_deprecations() + + self.connection_options.process_connection_options() + client_args = self.connection_options.get_hvac_connection_options() + client = self.helper.get_vault_client(**client_args) + + data = self._options_adapter.get_option('data') + wrap_ttl = self._options_adapter.get_option_default('wrap_ttl') + + try: + self.authenticator.validate() + self.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + raise_from(AnsibleError(e), e) + + for term in terms: + try: + response = client.write(path=term, wrap_ttl=wrap_ttl, **data) + except hvac.exceptions.Forbidden as e: + raise_from(AnsibleError("Forbidden: Permission Denied to path '%s'." % term), e) + except hvac.exceptions.InvalidPath as e: + raise_from(AnsibleError("The path '%s' doesn't seem to exist." % term), e) + except hvac.exceptions.InternalServerError as e: + raise_from(AnsibleError("Internal Server Error: %s" % str(e)), e) + + # https://github.com/hvac/hvac/issues/797 + # HVAC returns a raw response object when the body is not JSON. + # That includes 204 responses, which are successful with no body. + # So we will try to detect that and act accordingly. + # A better way may be to implement our own adapter for this + # collection, but it's a little premature to do that.
+ if hasattr(response, 'json') and callable(response.json): + if response.status_code == 204: + output = {} + else: + display.warning('Vault returned status code %i and an unparsable body.' % response.status_code) + output = response.content + else: + output = response + + ret.append(output) + + return ret diff --git a/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_approle.py b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_approle.py new file mode 100644 index 000000000..0c261d3a9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_approle.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +'''Python versions supported: >=3.6''' + +# FOR INTERNAL COLLECTION USE ONLY +# The interfaces in this file are meant for use within the community.hashi_vault collection +# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release. +# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultAuthMethodBase + + +class HashiVaultAuthMethodApprole(HashiVaultAuthMethodBase): + '''HashiVault option group class for auth: approle''' + + NAME = 'approle' + OPTIONS = ['role_id', 'secret_id', 'mount_point'] + + def __init__(self, option_adapter, warning_callback, deprecate_callback): + super(HashiVaultAuthMethodApprole, self).__init__(option_adapter, warning_callback, deprecate_callback) + + def validate(self): + self.validate_by_required_fields('role_id') + + def authenticate(self, client, use_token=True): + params = self._options.get_filled_options(*self.OPTIONS) + try: + response = client.auth.approle.login(use_token=use_token, **params) + except (NotImplementedError, AttributeError): + self.warn("HVAC should be updated to version 0.10.6 or higher. Deprecated method 'auth_approle' will be used.") + response = client.auth_approle(use_token=use_token, **params) + + return response diff --git a/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_aws_iam.py b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_aws_iam.py new file mode 100644 index 000000000..e3bb004ba --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_aws_iam.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +'''Python versions supported: >=3.6''' + +# FOR INTERNAL COLLECTION USE ONLY +# The interfaces in this file are meant for use within the community.hashi_vault collection +# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release. +# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. 
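The approle method above is a thin wrapper over hvac's own AppRole login. A minimal standalone sketch of the same call, assuming hvac >= 0.10.6 and using placeholder Vault address and credentials:

    import hvac

    # placeholder address and credentials, for illustration only
    client = hvac.Client(url='https://vault.example.com:8200')
    login = client.auth.approle.login(role_id='my-role-id', secret_id='my-secret-id')

    # with use_token=True (the default), hvac applies the returned token to the client
    print(login['auth']['client_token'])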
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import (
+    HashiVaultAuthMethodBase,
+    HashiVaultValueError,
+)
+
+
+class HashiVaultAuthMethodAwsIam(HashiVaultAuthMethodBase):
+    '''HashiVault option group class for auth: aws_iam'''
+
+    NAME = 'aws_iam'
+    OPTIONS = [
+        'aws_profile',
+        'aws_access_key',
+        'aws_secret_key',
+        'aws_security_token',
+        'region',
+        'aws_iam_server_id',
+        'role_id',
+    ]
+
+    def __init__(self, option_adapter, warning_callback, deprecate_callback):
+        super(HashiVaultAuthMethodAwsIam, self).__init__(option_adapter, warning_callback, deprecate_callback)
+
+    def validate(self):
+        params = {
+            'access_key': self._options.get_option_default('aws_access_key'),
+            'secret_key': self._options.get_option_default('aws_secret_key'),
+        }
+
+        session_token = self._options.get_option_default('aws_security_token')
+        if session_token:
+            params['session_token'] = session_token
+
+        mount_point = self._options.get_option_default('mount_point')
+        if mount_point:
+            params['mount_point'] = mount_point
+
+        role = self._options.get_option_default('role_id')
+        if role:
+            params['role'] = role
+
+        region = self._options.get_option_default('region')
+        if region:
+            params['region'] = region
+
+        header_value = self._options.get_option_default('aws_iam_server_id')
+        if header_value:
+            params['header_value'] = header_value
+
+        if not (params['access_key'] and params['secret_key']):
+            try:
+                import boto3
+                import botocore
+            except ImportError:
+                raise HashiVaultValueError("boto3 is required for loading a profile or IAM role credentials.")
+
+            profile = self._options.get_option_default('aws_profile')
+            try:
+                session_credentials = boto3.session.Session(profile_name=profile).get_credentials()
+            except botocore.exceptions.ProfileNotFound:
+                raise HashiVaultValueError("The AWS profile '%s' was not found." % profile)
+
+            if not session_credentials:
+                raise HashiVaultValueError("No AWS credentials supplied or available.")
+
+            params['access_key'] = session_credentials.access_key
+            params['secret_key'] = session_credentials.secret_key
+            if session_credentials.token:
+                params['session_token'] = session_credentials.token
+
+        self._auth_aws_iam_login_params = params
+
+    def authenticate(self, client, use_token=True):
+        params = self._auth_aws_iam_login_params
+        try:
+            response = client.auth.aws.iam_login(use_token=use_token, **params)
+        except (NotImplementedError, AttributeError):
+            self.warn("HVAC should be updated to version 0.9.3 or higher. Deprecated method 'auth_aws_iam' will be used.")
+            response = client.auth_aws_iam(use_token=use_token, **params)
+
+        return response
diff --git a/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_azure.py b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_azure.py
new file mode 100644
index 000000000..36f44e07c
--- /dev/null
+++ b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_azure.py
@@ -0,0 +1,105 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2022 Junrui Chen (@jchenship)
+# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause)
+# SPDX-License-Identifier: BSD-2-Clause
+
+'''Python versions supported: >=3.6'''
+
+# FOR INTERNAL COLLECTION USE ONLY
+# The interfaces in this file are meant for use within the community.hashi_vault collection
+# and may not remain stable to outside uses.
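The AWS IAM method above either takes explicit keys or falls back to a boto3 session for credential resolution. A rough standalone equivalent, assuming boto3 and hvac are installed and using placeholder profile, role, and address values:

    import boto3
    import hvac

    # profile_name=None would use boto3's default credential chain instead
    creds = boto3.session.Session(profile_name='my-profile').get_credentials()

    client = hvac.Client(url='https://vault.example.com:8200')
    client.auth.aws.iam_login(
        access_key=creds.access_key,
        secret_key=creds.secret_key,
        session_token=creds.token,
        role='my-vault-role',
    )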
Changes may be made in ANY release, even a bugfix release. +# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultAuthMethodBase, + HashiVaultValueError, +) + + +class HashiVaultAuthMethodAzure(HashiVaultAuthMethodBase): + '''HashiVault auth method for Azure''' + + NAME = 'azure' + OPTIONS = [ + 'role_id', + 'jwt', + 'mount_point', + 'azure_tenant_id', + 'azure_client_id', + 'azure_client_secret', + 'azure_resource', + ] + + def __init__(self, option_adapter, warning_callback, deprecate_callback): + super(HashiVaultAuthMethodAzure, self).__init__( + option_adapter, warning_callback, deprecate_callback + ) + + def validate(self): + params = { + 'role': self._options.get_option_default('role_id'), + 'jwt': self._options.get_option_default('jwt'), + } + if not params['role']: + raise HashiVaultValueError( + 'role_id is required for azure authentication.' + ) + + # if mount_point is not provided, it will use the default value defined + # in hvac library (e.g. `azure`) + mount_point = self._options.get_option_default('mount_point') + if mount_point: + params['mount_point'] = mount_point + + # if jwt exists, use provided jwt directly, otherwise trying to get jwt + # from azure service principal or managed identity + if not params['jwt']: + azure_tenant_id = self._options.get_option_default('azure_tenant_id') + azure_client_id = self._options.get_option_default('azure_client_id') + azure_client_secret = self._options.get_option_default('azure_client_secret') + + # the logic of getting azure scope is from this function + # https://github.com/Azure/azure-cli/blob/azure-cli-2.39.0/src/azure-cli-core/azure/cli/core/auth/util.py#L72 + # the reason we expose resource instead of scope is resource is + # more aligned with the vault azure auth config here + # https://www.vaultproject.io/api-docs/auth/azure#resource + azure_resource = self._options.get_option('azure_resource') + azure_scope = azure_resource + "/.default" + + try: + import azure.identity + except ImportError: + raise HashiVaultValueError( + "azure-identity is required for getting access token from azure service principal or managed identity." + ) + + if azure_client_id and azure_client_secret: + # service principal + if not azure_tenant_id: + raise HashiVaultValueError( + 'azure_tenant_id is required when using azure service principal.' 
+ ) + azure_credentials = azure.identity.ClientSecretCredential( + azure_tenant_id, azure_client_id, azure_client_secret + ) + elif azure_client_id: + # user assigned managed identity + azure_credentials = azure.identity.ManagedIdentityCredential( + client_id=azure_client_id + ) + else: + # system assigned managed identity + azure_credentials = azure.identity.ManagedIdentityCredential() + + params['jwt'] = azure_credentials.get_token(azure_scope).token + + self._auth_azure_login_params = params + + def authenticate(self, client, use_token=True): + params = self._auth_azure_login_params + response = client.auth.azure.login(use_token=use_token, **params) + return response diff --git a/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_cert.py b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_cert.py new file mode 100644 index 000000000..af5d3bb15 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_cert.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Devon Mar (@devon-mar) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultAuthMethodBase + + +class HashiVaultAuthMethodCert(HashiVaultAuthMethodBase): + """HashiVault option group class for auth: cert""" + + NAME = "cert" + OPTIONS = ["cert_auth_public_key", "cert_auth_private_key", "mount_point", "role_id"] + + def __init__(self, option_adapter, warning_callback, deprecate_callback): + super(HashiVaultAuthMethodCert, self).__init__(option_adapter, warning_callback, deprecate_callback) + + def validate(self): + self.validate_by_required_fields("cert_auth_public_key", "cert_auth_private_key") + + def authenticate(self, client, use_token=True): + options = self._options.get_filled_options(*self.OPTIONS) + + params = { + "cert_pem": options["cert_auth_public_key"], + "key_pem": options["cert_auth_private_key"], + } + + if "mount_point" in options: + params["mount_point"] = options["mount_point"] + if "role_id" in options: + params["name"] = options["role_id"] + + try: + response = client.auth.cert.login(use_token=use_token, **params) + except NotImplementedError: + raise NotImplementedError("cert authentication requires HVAC version 0.10.12 or higher.") + + return response diff --git a/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_jwt.py b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_jwt.py new file mode 100644 index 000000000..da2919426 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_jwt.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +'''Python versions supported: >=3.6''' + +# FOR INTERNAL COLLECTION USE ONLY +# The interfaces in this file are meant for use within the community.hashi_vault collection +# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release. +# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. 
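When no JWT is supplied, the Azure method above fetches one from azure-identity before logging in. A condensed sketch of that flow for a system-assigned managed identity, with a placeholder Vault address and role (the scope here is derived from the default azure_resource of https://management.azure.com/):

    import azure.identity
    import hvac

    credential = azure.identity.ManagedIdentityCredential()
    jwt = credential.get_token('https://management.azure.com/.default').token

    client = hvac.Client(url='https://vault.example.com:8200')
    client.auth.azure.login(role='my-role', jwt=jwt)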
+ +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultAuthMethodBase + + +class HashiVaultAuthMethodJwt(HashiVaultAuthMethodBase): + '''HashiVault option group class for auth: jwt''' + + NAME = 'jwt' + OPTIONS = ['jwt', 'role_id', 'mount_point'] + + def __init__(self, option_adapter, warning_callback, deprecate_callback): + super(HashiVaultAuthMethodJwt, self).__init__(option_adapter, warning_callback, deprecate_callback) + + def validate(self): + self.validate_by_required_fields('role_id', 'jwt') + + def authenticate(self, client, use_token=True): + params = self._options.get_filled_options(*self.OPTIONS) + params['role'] = params.pop('role_id') + + if 'mount_point' in params: + params['path'] = params.pop('mount_point') + + try: + response = client.auth.jwt.jwt_login(**params) + except (NotImplementedError, AttributeError): + raise NotImplementedError("JWT authentication requires HVAC version 0.10.5 or higher.") + + # must manually set the client token with JWT login + # see https://github.com/hvac/hvac/issues/644 + # fixed in https://github.com/hvac/hvac/pull/746 + # but we do it manually to maintain compatibilty with older hvac versions. + if use_token: + client.token = response['auth']['client_token'] + + return response diff --git a/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_ldap.py b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_ldap.py new file mode 100644 index 000000000..7fcb6b38e --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_ldap.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +'''Python versions supported: >=3.6''' + +# FOR INTERNAL COLLECTION USE ONLY +# The interfaces in this file are meant for use within the community.hashi_vault collection +# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release. +# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultAuthMethodBase + + +class HashiVaultAuthMethodLdap(HashiVaultAuthMethodBase): + '''HashiVault option group class for auth: ldap''' + + NAME = 'ldap' + OPTIONS = ['username', 'password', 'mount_point'] + + def __init__(self, option_adapter, warning_callback, deprecate_callback): + super(HashiVaultAuthMethodLdap, self).__init__(option_adapter, warning_callback, deprecate_callback) + + def validate(self): + self.validate_by_required_fields('username', 'password') + + def authenticate(self, client, use_token=True): + params = self._options.get_filled_options(*self.OPTIONS) + try: + response = client.auth.ldap.login(use_token=use_token, **params) + except (NotImplementedError, AttributeError): + self.warn("HVAC should be updated to version 0.7.0 or higher. 
Deprecated method 'auth_ldap' will be used.") + response = client.auth_ldap(use_token=use_token, **params) + + return response diff --git a/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_none.py b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_none.py new file mode 100644 index 000000000..22c3e28f9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_none.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +'''Python versions supported: >=3.6''' + +# FOR INTERNAL COLLECTION USE ONLY +# The interfaces in this file are meant for use within the community.hashi_vault collection +# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release. +# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultAuthMethodBase + + +class HashiVaultAuthMethodNone(HashiVaultAuthMethodBase): + '''HashiVault option group class for auth: none''' + + NAME = 'none' + OPTIONS = [] + + def __init__(self, option_adapter, warning_callback, deprecate_callback): + super(HashiVaultAuthMethodNone, self).__init__(option_adapter, warning_callback, deprecate_callback) + + def validate(self): + pass + + def authenticate(self, client, use_token=False): + return None diff --git a/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_token.py b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_token.py new file mode 100644 index 000000000..3b66b1937 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_token.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +'''Python versions supported: >=3.6''' + +# FOR INTERNAL COLLECTION USE ONLY +# The interfaces in this file are meant for use within the community.hashi_vault collection +# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release. +# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. 
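As the comments in the JWT method above note, some hvac releases do not apply the token returned by jwt_login, so the collection assigns it manually. A minimal sketch of that pattern, assuming hvac >= 0.10.5 and placeholder values:

    import hvac

    client = hvac.Client(url='https://vault.example.com:8200')
    response = client.auth.jwt.jwt_login(role='my-role', jwt='eyJhbGciOi...', path='jwt')

    # assign the token explicitly; newer hvac does this automatically, older versions do not
    client.token = response['auth']['client_token']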
+ +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +import os + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultAuthMethodBase, + HashiVaultValueError, +) + + +class HashiVaultAuthMethodToken(HashiVaultAuthMethodBase): + '''HashiVault option group class for auth: userpass''' + + NAME = 'token' + OPTIONS = ['token', 'token_path', 'token_file', 'token_validate'] + + _LATE_BINDING_ENV_VAR_OPTIONS = { + 'token': dict(env=['VAULT_TOKEN']), + 'token_path': dict(env=['HOME']), + } + + def __init__(self, option_adapter, warning_callback, deprecate_callback): + super(HashiVaultAuthMethodToken, self).__init__(option_adapter, warning_callback, deprecate_callback) + + def _simulate_login_response(self, token, lookup_response=None): + '''returns a similar structure to a login method's return, optionally incorporating a lookup-self response''' + + response = { + 'auth': { + 'client_token': token + } + } + + if lookup_response is None: + return response + + # first merge in the entire response at the top level + # but, rather than being missing, the auth field is going to be None, + # so we explicitly overwrite that with our original value. + response.update(lookup_response, auth=response['auth']) + + # then we'll merge the data field right into the auth field + response['auth'].update(lookup_response['data']) + + # and meta->metadata needs a name change + metadata = response['auth'].pop('meta', None) + if metadata: + response['auth']['metadata'] = metadata + + return response + + def validate(self): + self.process_late_binding_env_vars(self._LATE_BINDING_ENV_VAR_OPTIONS) + + if self._options.get_option_default('token') is None and self._options.get_option_default('token_path') is not None: + token_filename = os.path.join( + self._options.get_option('token_path'), + self._options.get_option('token_file') + ) + if os.path.exists(token_filename): + if not os.path.isfile(token_filename): + raise HashiVaultValueError("The Vault token file '%s' was found but is not a file." % token_filename) + with open(token_filename) as token_file: + self._options.set_option('token', token_file.read().strip()) + + if self._options.get_option_default('token') is None: + raise HashiVaultValueError("No Vault Token specified or discovered.") + + def authenticate(self, client, use_token=True, lookup_self=False): + token = self._stringify(self._options.get_option('token')) + validate = self._options.get_option_default('token_validate') + + response = None + + if use_token: + client.token = token + + if lookup_self or validate: + from hvac import exceptions + + try: + try: + response = client.auth.token.lookup_self() + except (NotImplementedError, AttributeError): + # usually we would warn here, but the v1 method doesn't seem to be deprecated (yet?) 
+ response = client.lookup_token() # when token=None on this method, it calls lookup-self + except (exceptions.Forbidden, exceptions.InvalidPath, exceptions.InvalidRequest): + if validate: + raise HashiVaultValueError("Invalid Vault Token Specified.") + + return self._simulate_login_response(token, response) diff --git a/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_userpass.py b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_userpass.py new file mode 100644 index 000000000..f9ba58f60 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/module_utils/_auth_method_userpass.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +'''Python versions supported: >=3.6''' + +# FOR INTERNAL COLLECTION USE ONLY +# The interfaces in this file are meant for use within the community.hashi_vault collection +# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release. +# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultAuthMethodBase + + +class HashiVaultAuthMethodUserpass(HashiVaultAuthMethodBase): + '''HashiVault option group class for auth: userpass''' + + NAME = 'userpass' + OPTIONS = ['username', 'password', 'mount_point'] + + def __init__(self, option_adapter, warning_callback, deprecate_callback): + super(HashiVaultAuthMethodUserpass, self).__init__(option_adapter, warning_callback, deprecate_callback) + + def validate(self): + self.validate_by_required_fields('username', 'password') + + def authenticate(self, client, use_token=True): + params = self._options.get_filled_options(*self.OPTIONS) + try: + response = client.auth.userpass.login(**params) + except (NotImplementedError, AttributeError): + self.warn("HVAC should be updated to version 0.9.6 or higher. Deprecated method 'auth_userpass' will be used.") + response = client.auth_userpass(**params) + + # must manually set the client token with userpass login + # see https://github.com/hvac/hvac/issues/644 + # fixed in 0.11.0 (https://github.com/hvac/hvac/pull/733) + # but we keep the old behavior to maintain compatibility with older hvac + if use_token: + client.token = response['auth']['client_token'] + + return response diff --git a/ansible_collections/community/hashi_vault/plugins/module_utils/_authenticator.py b/ansible_collections/community/hashi_vault/plugins/module_utils/_authenticator.py new file mode 100644 index 000000000..acf574bfe --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/module_utils/_authenticator.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +'''Python versions supported: >=3.6''' + +# FOR INTERNAL COLLECTION USE ONLY +# The interfaces in this file are meant for use within the community.hashi_vault collection +# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release. 
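The userpass method above uses the same manual token assignment for compatibility with hvac releases prior to 0.11.0. Sketched outside the collection with placeholder credentials:

    import hvac

    client = hvac.Client(url='https://vault.example.com:8200')
    response = client.auth.userpass.login(username='someuser', password='example-password')

    # explicit assignment keeps older hvac versions working (see hvac#644)
    client.token = response['auth']['client_token']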
+# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +# please keep this list in alphabetical order of auth method name +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_approle import HashiVaultAuthMethodApprole +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_aws_iam import HashiVaultAuthMethodAwsIam +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_azure import HashiVaultAuthMethodAzure +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_cert import HashiVaultAuthMethodCert +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_jwt import HashiVaultAuthMethodJwt +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_ldap import HashiVaultAuthMethodLdap +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_none import HashiVaultAuthMethodNone +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_token import HashiVaultAuthMethodToken +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_userpass import HashiVaultAuthMethodUserpass + + +class HashiVaultAuthenticator(): + ARGSPEC = dict( + auth_method=dict(type='str', default='token', choices=[ + 'token', + 'userpass', + 'ldap', + 'approle', + 'aws_iam', + 'azure', + 'jwt', + 'cert', + 'none', + ]), + mount_point=dict(type='str'), + token=dict(type='str', no_log=True, default=None), + token_path=dict(type='str', default=None, no_log=False), + token_file=dict(type='str', default='.vault-token'), + token_validate=dict(type='bool', default=False), + username=dict(type='str'), + password=dict(type='str', no_log=True), + role_id=dict(type='str'), + secret_id=dict(type='str', no_log=True), + jwt=dict(type='str', no_log=True), + aws_profile=dict(type='str', aliases=['boto_profile']), + aws_access_key=dict(type='str', aliases=['aws_access_key_id'], no_log=False), + aws_secret_key=dict(type='str', aliases=['aws_secret_access_key'], no_log=True), + aws_security_token=dict(type='str', no_log=False), + region=dict(type='str'), + aws_iam_server_id=dict(type='str'), + azure_tenant_id=dict(type='str'), + azure_client_id=dict(type='str'), + azure_client_secret=dict(type='str', no_log=True), + azure_resource=dict(type='str', default='https://management.azure.com/'), + cert_auth_private_key=dict(type='path', no_log=False), + cert_auth_public_key=dict(type='path'), + ) + + def __init__(self, option_adapter, warning_callback, deprecate_callback): + self._options = option_adapter + self._selector = { + # please keep this list in alphabetical order of auth method name + # so that it's easier to scan and see at a glance that a given auth method is present or absent + 'approle': HashiVaultAuthMethodApprole(option_adapter, warning_callback, deprecate_callback), + 'aws_iam': HashiVaultAuthMethodAwsIam(option_adapter, warning_callback, deprecate_callback), + 'azure': HashiVaultAuthMethodAzure(option_adapter, warning_callback, deprecate_callback), + 'cert': HashiVaultAuthMethodCert(option_adapter, warning_callback, deprecate_callback), + 'jwt': HashiVaultAuthMethodJwt(option_adapter, warning_callback, deprecate_callback), + 'ldap': HashiVaultAuthMethodLdap(option_adapter, warning_callback, deprecate_callback), + 'none': 
HashiVaultAuthMethodNone(option_adapter, warning_callback, deprecate_callback), + 'token': HashiVaultAuthMethodToken(option_adapter, warning_callback, deprecate_callback), + 'userpass': HashiVaultAuthMethodUserpass(option_adapter, warning_callback, deprecate_callback), + } + + self.warn = warning_callback + self.deprecate = deprecate_callback + + def _get_method_object(self, method=None): + if method is None: + method = self._options.get_option('auth_method') + + try: + o_method = self._selector[method] + except KeyError: + raise NotImplementedError("auth method '%s' is not implemented in HashiVaultAuthenticator" % method) + + return o_method + + def validate(self, *args, **kwargs): + method = self._get_method_object(kwargs.pop('method', None)) + method.validate(*args, **kwargs) + + def authenticate(self, *args, **kwargs): + method = self._get_method_object(kwargs.pop('method', None)) + return method.authenticate(*args, **kwargs) diff --git a/ansible_collections/community/hashi_vault/plugins/module_utils/_connection_options.py b/ansible_collections/community/hashi_vault/plugins/module_utils/_connection_options.py new file mode 100644 index 000000000..f570479d0 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/module_utils/_connection_options.py @@ -0,0 +1,260 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +'''Python versions supported: >=3.6''' + +# FOR INTERNAL COLLECTION USE ONLY +# The interfaces in this file are meant for use within the community.hashi_vault collection +# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release. +# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. 
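HashiVaultAuthenticator above is essentially a name-to-object dispatcher over the individual auth method classes. A stripped-down illustration of the same pattern (a hypothetical class, not part of the collection):

    class SimpleAuthenticator:
        '''Dispatches to auth method objects by their NAME attribute.'''

        def __init__(self, *methods):
            # each method object exposes NAME, validate(), and authenticate(client)
            self._selector = {m.NAME: m for m in methods}

        def login(self, client, method_name):
            try:
                method = self._selector[method_name]
            except KeyError:
                raise NotImplementedError("auth method '%s' is not implemented" % method_name)
            method.validate()
            return method.authenticate(client)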
+ +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +import os + +from ansible.module_utils.common.text.converters import to_text + +from ansible.module_utils.common.validation import ( + check_type_dict, + check_type_str, + check_type_bool, + check_type_int, +) + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultOptionGroupBase + +# we implement retries via the urllib3 Retry class +# https://github.com/ansible-collections/community.hashi_vault/issues/58 +HAS_RETRIES = False +try: + from requests import Session + from requests.adapters import HTTPAdapter + try: + # try for a standalone urllib3 + import urllib3 + HAS_RETRIES = True + except ImportError: + try: + # failing that try for a vendored version within requests + from requests.packages import urllib3 + HAS_RETRIES = True + except ImportError: + pass +except ImportError: + pass + + +class HashiVaultConnectionOptions(HashiVaultOptionGroupBase): + '''HashiVault option group class for connection options''' + + OPTIONS = ['url', 'proxies', 'ca_cert', 'validate_certs', 'namespace', 'timeout', 'retries', 'retry_action'] + + ARGSPEC = dict( + url=dict(type='str', default=None), + proxies=dict(type='raw'), + ca_cert=dict(type='str', aliases=['cacert'], default=None), + validate_certs=dict(type='bool'), + namespace=dict(type='str', default=None), + timeout=dict(type='int'), + retries=dict(type='raw'), + retry_action=dict(type='str', choices=['ignore', 'warn'], default='warn'), + ) + + _LATE_BINDING_ENV_VAR_OPTIONS = { + 'url': dict(env=['VAULT_ADDR'], required=True), + 'ca_cert': dict(env=['VAULT_CACERT']), + 'namespace': dict(env=['VAULT_NAMESPACE']), + } + + _RETRIES_DEFAULT_PARAMS = { + 'status_forcelist': [ + # https://www.vaultproject.io/api#http-status-codes + # 429 is usually a "too many requests" status, but in Vault it's the default health status response for standby nodes. + 412, # Precondition failed. Returned on Enterprise when a request can't be processed yet due to some missing eventually consistent data. + # Should be retried, perhaps with a little backoff. + 500, # Internal server error. An internal error has occurred, try again later. If the error persists, report a bug. + 502, # A request to Vault required Vault making a request to a third party; the third party responded with an error of some kind. + 503, # Vault is down for maintenance or is currently sealed. Try again later. + ], + ( + # this field name changed in 1.26.0, and in the interest of supporting a wider range of urllib3 versions + # we'll use the new name whenever possible, but fall back seamlessly when needed. 
+ # See also: + # - https://github.com/urllib3/urllib3/issues/2092 + # - https://github.com/urllib3/urllib3/blob/main/CHANGES.rst#1260-2020-11-10 + "allowed_methods" if HAS_RETRIES and hasattr(urllib3.util.Retry.DEFAULT, "allowed_methods") else "method_whitelist" + ): None, # None allows retries on all methods, including those which may not be considered idempotent, like POST + 'backoff_factor': 0.3, + } + + def __init__(self, option_adapter, retry_callback_generator=None): + super(HashiVaultConnectionOptions, self).__init__(option_adapter) + self._retry_callback_generator = retry_callback_generator + + def get_hvac_connection_options(self): + '''returns kwargs to be used for constructing an hvac.Client''' + + # validate_certs is only used to optionally change the value of ca_cert + def _filter(k, v): + return v is not None and k != 'validate_certs' + + # our transformed ca_cert value will become the verify parameter for the hvac client + hvopts = self._options.get_filtered_options(_filter, *self.OPTIONS) + hvopts['verify'] = hvopts.pop('ca_cert') + + retry_action = hvopts.pop('retry_action') + if 'retries' in hvopts: + hvopts['session'] = self._get_custom_requests_session(new_callback=self._retry_callback_generator(retry_action), **hvopts.pop('retries')) + + return hvopts + + def process_connection_options(self): + '''executes special processing required for certain options''' + self.process_late_binding_env_vars(self._LATE_BINDING_ENV_VAR_OPTIONS) + + self._boolean_or_cacert() + self._process_option_proxies() + self._process_option_retries() + + def _get_custom_requests_session(self, **retry_kwargs): + '''returns a requests.Session to pass to hvac (or None)''' + + if not HAS_RETRIES: + # because hvac requires requests which requires urllib3 it's unlikely we'll ever reach this condition. + raise NotImplementedError("Retries are unavailable. This may indicate very old versions of one or more of the following: hvac, requests, urllib3.") + + # This is defined here because Retry may not be defined if its import failed. + # As mentioned above, that's very unlikely, but it'll fail sanity tests nonetheless if defined with other classes. + class CallbackRetry(urllib3.util.Retry): + def __init__(self, *args, **kwargs): + self._newcb = kwargs.pop('new_callback') + super(CallbackRetry, self).__init__(*args, **kwargs) + + def new(self, **kwargs): + if self._newcb is not None: + self._newcb(self) + + kwargs['new_callback'] = self._newcb + return super(CallbackRetry, self).new(**kwargs) + + # We don't want the Retry class raising its own exceptions because that will prevent + # hvac from raising its own on various response codes. + # We set this here, rather than in the defaults, because if the caller sets their own + # dict for retries, we use it directly, but we don't want them to have to remember to always + # set raise_on_status=False themselves to get proper error handling. + # On the off chance someone does set it, we leave it alone, even though it's probably a mistake. + # That will be mentioned in the parameter docs. 
+ if 'raise_on_status' not in retry_kwargs: + retry_kwargs['raise_on_status'] = False + # needs urllib 1.15+ https://github.com/urllib3/urllib3/blob/main/CHANGES.rst#115-2016-04-06 + # but we should always have newer ones via requests, via hvac + + retry = CallbackRetry(**retry_kwargs) + + adapter = HTTPAdapter(max_retries=retry) + sess = Session() + sess.mount("https://", adapter) + sess.mount("http://", adapter) + + return sess + + def _process_option_retries(self): + '''check if retries option is int or dict and interpret it appropriately''' + # this method focuses on validating the option, and setting a valid Retry object construction dict + # it intentionally does not build the Session object, which will be done elsewhere + + retries_opt = self._options.get_option('retries') + + if retries_opt is None: + return + + # we'll start with a copy of our defaults + retries = self._RETRIES_DEFAULT_PARAMS.copy() + + try: + # try int + # on int, retry the specified number of times, and use the defaults for everything else + # on zero, disable retries + retries_int = check_type_int(retries_opt) + + if retries_int < 0: + raise ValueError("Number of retries must be >= 0 (got %i)" % retries_int) + elif retries_int == 0: + retries = None + else: + retries['total'] = retries_int + + except TypeError: + try: + # try dict + # on dict, use the value directly (will be used as the kwargs to initialize the Retry instance) + retries = check_type_dict(retries_opt) + except TypeError: + raise TypeError("retries option must be interpretable as int or dict. Got: %r" % retries_opt) + + self._options.set_option('retries', retries) + + def _process_option_proxies(self): + '''check if 'proxies' option is dict or str and set it appropriately''' + + proxies_opt = self._options.get_option('proxies') + + if proxies_opt is None: + return + + try: + # if it can be interpreted as dict + # do it + proxies = check_type_dict(proxies_opt) + except TypeError: + # if it can't be interpreted as dict + proxy = check_type_str(proxies_opt) + # but can be interpreted as str + # use this str as http and https proxy + proxies = { + 'http': proxy, + 'https': proxy, + } + + # record the new/interpreted value for 'proxies' option + self._options.set_option('proxies', proxies) + + def _boolean_or_cacert(self): + # This is needed because of this (https://hvac.readthedocs.io/en/stable/source/hvac_v1.html): + # + # # verify (Union[bool,str]) - Either a boolean to indicate whether TLS verification should + # # be performed when sending requests to Vault, or a string pointing at the CA bundle to use for verification. 
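The session construction above amounts to mounting an HTTPAdapter with a urllib3 Retry onto a requests Session and handing that session to hvac. A standalone sketch using settings in the spirit of the collection's defaults and a placeholder address (allowed_methods assumes urllib3 >= 1.26; older versions call it method_whitelist):

    import hvac
    from requests import Session
    from requests.adapters import HTTPAdapter
    from urllib3.util import Retry

    retry = Retry(
        total=3,
        backoff_factor=0.3,
        status_forcelist=[412, 500, 502, 503],
        allowed_methods=None,   # retry every method, not just idempotent ones
        raise_on_status=False,  # let hvac turn HTTP errors into its own exceptions
    )

    session = Session()
    adapter = HTTPAdapter(max_retries=retry)
    session.mount('https://', adapter)
    session.mount('http://', adapter)

    client = hvac.Client(url='https://vault.example.com:8200', session=session)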
+ # + '''return a bool or cacert''' + ca_cert = self._options.get_option('ca_cert') + + validate_certs = self._options.get_option('validate_certs') + + if validate_certs is None: + # Validate certs option was not explicitly set + + # Check if VAULT_SKIP_VERIFY is set + vault_skip_verify = os.environ.get('VAULT_SKIP_VERIFY') + + if vault_skip_verify is not None: + # VAULT_SKIP_VERIFY is set + try: + # Check that we have a boolean value + vault_skip_verify = check_type_bool(vault_skip_verify) + except TypeError: + # Not a boolean value fallback to default value (True) + validate_certs = True + else: + # Use the inverse of VAULT_SKIP_VERIFY + validate_certs = not vault_skip_verify + else: + validate_certs = True + + if not (validate_certs and ca_cert): + self._options.set_option('ca_cert', validate_certs) + else: + self._options.set_option('ca_cert', to_text(ca_cert, errors='surrogate_or_strict')) diff --git a/ansible_collections/community/hashi_vault/plugins/module_utils/_hashi_vault_common.py b/ansible_collections/community/hashi_vault/plugins/module_utils/_hashi_vault_common.py new file mode 100644 index 000000000..b39431c05 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/module_utils/_hashi_vault_common.py @@ -0,0 +1,302 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +'''Python versions supported: >=3.6''' + +# FOR INTERNAL COLLECTION USE ONLY +# The interfaces in this file are meant for use within the community.hashi_vault collection +# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release. +# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +import os + + +HAS_HVAC = False +try: + import hvac + HAS_HVAC = True +except ImportError: + HAS_HVAC = False + + +def _stringify(input): + ''' + This method is primarily used to Un-Unsafe values that come from Ansible. + We want to remove the Unsafe context so that libraries don't get confused + by the values. + ''' + + # Since this is a module_util, and will be used by both plugins and modules, + # we cannot import the AnsibleUnsafe* types, because they are controller-only. + # However, they subclass the native types, so we can check for that. + + # bytes is the only consistent type to check against in both py2 and py3 + if isinstance(input, bytes): + # seems redundant, but this will give us a regular bytes object even + # when the input is AnsibleUnsafeBytes + return bytes(input) + else: + # instead of checking for py2 vs. py3 to cast to str or unicode, + # let's get the type from the literal. + return type(u'')(input) + + +class HashiVaultValueError(ValueError): + '''Use in common code to raise an Exception that can be turned into AnsibleError or used to fail_json()''' + + +class HashiVaultHelper(): + + STRINGIFY_CANDIDATES = set([ + 'token', # Token will end up in a header, requests requires headers to be str or bytes, + # and newer versions of requests stopped converting automatically. Because our + # token could have been passed in from a previous lookup call, it could be one + # of the AnsibleUnsafe types instead, causing a failure. Tokens should always + # be strings, so we will convert them. 
+ 'namespace', # namespace is also set in a header + ]) + + def __init__(self): + # TODO move hvac checking here? + pass + + @staticmethod + def _stringify(input): + return _stringify(input) + + def get_vault_client( + self, + hashi_vault_logout_inferred_token=True, hashi_vault_revoke_on_logout=False, hashi_vault_stringify_args=True, + **kwargs + ): + ''' + creates a Vault client with the given kwargs + + :param hashi_vault_logout_inferred_token: if True performs "logout" after creation to remove any token that + the hvac library itself may have read in. Only used if "token" is not included in kwargs. + :type hashi_vault_logout_implied_token: bool + + :param hashi_vault_revoke_on_logout: if True revokes any current token on logout. Only used if a logout is performed. Not recommended. + :type hashi_vault_revoke_on_logout: bool + + :param hashi_vault_stringify_args: if True converts a specific set of defined kwargs to a string type. + :type hashi_vault_stringify_args: bool + ''' + + if hashi_vault_stringify_args: + for key in kwargs.keys(): + if key in self.STRINGIFY_CANDIDATES: + kwargs[key] = self._stringify(kwargs[key]) + + client = hvac.Client(**kwargs) + + # logout to prevent accidental use of inferred tokens + # https://github.com/ansible-collections/community.hashi_vault/issues/13 + if hashi_vault_logout_inferred_token and 'token' not in kwargs: + client.logout(revoke_token=hashi_vault_revoke_on_logout) + + return client + + +class HashiVaultOptionAdapter(object): + ''' + The purpose of this class is to provide a standard interface for option getting/setting + within module_utils code, since the processes are so different in plugins and modules. + + Attention is paid to ensuring that in plugins we use the methods provided by Config Manager, + but to allow flexibility to create an adapter to work with other sources, hence the design + of defining specific methods exposed, and having them call provided function references. + ''' + # More context on the need to call Config Manager methods: + # + # Some issues raised around deprecations in plugins not being processed led to comments + # from core maintainers around the need to use Config Manager and also to ensure any + # option needed is always retrieved using AnsiblePlugin.get_option(). At the time of this + # writing, based on the way Config Manager is implemented, that's not actually necessary, + # and calling AnsiblePlugin.set_options() to initialize them is enough. But that's not + # guaranteed to stay that way, if get_option() is used to "trigger" internal events. 
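The logout performed by get_vault_client above guards against hvac silently reusing a token it discovered on its own. The underlying calls, with a placeholder address:

    import hvac

    client = hvac.Client(url='https://vault.example.com:8200')

    # hvac may have picked up a token from VAULT_TOKEN or ~/.vault-token;
    # drop it (without revoking) so only explicitly supplied credentials are used
    client.logout(revoke_token=False)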
+ # + # More reading: + # - https://github.com/ansible-collections/community.hashi_vault/issues/35 + # - https://github.com/ansible/ansible/issues/73051 + # - https://github.com/ansible/ansible/pull/73058 + # - https://github.com/ansible/ansible/pull/73239 + # - https://github.com/ansible/ansible/pull/73240 + + @classmethod + def from_dict(cls, dict): + return cls( + getter=dict.__getitem__, + setter=dict.__setitem__, + haver=lambda key: key in dict, + updater=dict.update, + defaultsetter=dict.setdefault, + defaultgetter=dict.get, + ) + + @classmethod + def from_ansible_plugin(cls, plugin): + return cls( + getter=plugin.get_option, + setter=plugin.set_option, + haver=plugin.has_option if hasattr(plugin, 'has_option') else None, + # AnsiblePlugin.has_option was added in 2.10, see https://github.com/ansible/ansible/pull/61078 + ) + + @classmethod + def from_ansible_module(cls, module): + return cls.from_dict(module.params) + + def __init__( + self, + getter, setter, + haver=None, updater=None, getitems=None, getfiltereditems=None, getfilleditems=None, defaultsetter=None, defaultgetter=None): + + def _default_default_setter(key, default=None): + try: + value = self.get_option(key) + return value + except KeyError: + self.set_option(key, default) + return default + + def _default_updater(**kwargs): + for key, value in kwargs.items(): + self.set_option(key, value) + + def _default_haver(key): + try: + self.get_option(key) + return True + except KeyError: + return False + + def _default_getitems(*args): + return dict((key, self.get_option(key)) for key in args) + + def _default_getfiltereditems(filter, *args): + return dict((key, value) for key, value in self.get_options(*args).items() if filter(key, value)) + + def _default_getfilleditems(*args): + return self.get_filtered_options(lambda k, v: v is not None, *args) + + def _default_default_getter(key, default): + try: + return self.get_option(key) + except KeyError: + return default + + self._getter = getter + self._setter = setter + + self._haver = haver or _default_haver + self._updater = updater or _default_updater + self._getitems = getitems or _default_getitems + self._getfiltereditems = getfiltereditems or _default_getfiltereditems + self._getfilleditems = getfilleditems or _default_getfilleditems + self._defaultsetter = defaultsetter or _default_default_setter + self._defaultgetter = defaultgetter or _default_default_getter + + def get_option(self, key): + return self._getter(key) + + def get_option_default(self, key, default=None): + return self._defaultgetter(key, default) + + def set_option(self, key, value): + return self._setter(key, value) + + def set_option_default(self, key, default=None): + return self._defaultsetter(key, default) + + def has_option(self, key): + return self._haver(key) + + def set_options(self, **kwargs): + return self._updater(**kwargs) + + def get_options(self, *args): + return self._getitems(*args) + + def get_filtered_options(self, filter, *args): + return self._getfiltereditems(filter, *args) + + def get_filled_options(self, *args): + return self._getfilleditems(*args) + + +class HashiVaultOptionGroupBase: + '''A base class for class option group classes''' + + def __init__(self, option_adapter): + self._options = option_adapter + + def process_late_binding_env_vars(self, option_vars): + '''looks through a set of options, and if empty/None, looks for a value in specified env vars, or sets an optional default''' + # see https://github.com/ansible-collections/community.hashi_vault/issues/10 + # + # Options 
which seek to use environment vars that are not Ansible-specific + # should load those as values of last resort, so that INI values can override them. + # For default processing, list such options and vars here. + # Alternatively, process them in another appropriate place like an auth method's + # validate_ method. + # + # key = option_name + # value = dict with "env" key which is a list of env vars (in order of those checked first; process stops when value is found), + # and an optional "default" key whose value will be set if none of the env vars are found. + # An optional boolean "required" key can be used to specify that a value is required, so raise if one is not found. + + for opt, config in option_vars.items(): + for env in config['env']: + # we use has_option + get_option rather than get_option_default + # because we will only override if the option exists and + # is None, not if it's missing. For plugins, that is the usual, + # but for modules, they may have to set the default to None + # in the argspec if it has late binding env vars. + if self._options.has_option(opt) and self._options.get_option(opt) is None: + self._options.set_option(opt, os.environ.get(env)) + + if 'default' in config and self._options.has_option(opt) and self._options.get_option(opt) is None: + self._options.set_option(opt, config['default']) + + if 'required' in config and self._options.get_option_default(opt) is None: + raise HashiVaultValueError("Required option %s was not set." % opt) + + +class HashiVaultAuthMethodBase(HashiVaultOptionGroupBase): + '''Base class for individual auth method implementations''' + + def __init__(self, option_adapter, warning_callback, deprecate_callback): + super(HashiVaultAuthMethodBase, self).__init__(option_adapter) + self._warner = warning_callback + self._deprecator = deprecate_callback + + def validate(self): + '''Validates the given auth method as much as possible without calling Vault.''' + raise NotImplementedError('validate must be implemented') + + def authenticate(self, client, use_token=True): + '''Authenticates against Vault, returns a token.''' + raise NotImplementedError('authenticate must be implemented') + + def validate_by_required_fields(self, *field_names): + missing = [field for field in field_names if self._options.get_option_default(field) is None] + + if missing: + raise HashiVaultValueError("Authentication method %s requires options %r to be set, but these are missing: %r" % (self.NAME, field_names, missing)) + + def warn(self, message): + self._warner(message) + + def deprecate(self, message, version=None, date=None, collection_name=None): + self._deprecator(message, version=version, date=date, collection_name=collection_name) + + @staticmethod + def _stringify(input): + return _stringify(input) diff --git a/ansible_collections/community/hashi_vault/plugins/module_utils/_hashi_vault_module.py b/ansible_collections/community/hashi_vault/plugins/module_utils/_hashi_vault_module.py new file mode 100644 index 000000000..5d2dfafc6 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/module_utils/_hashi_vault_module.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +# FOR INTERNAL COLLECTION USE ONLY +# The interfaces in this file are meant for use within the community.hashi_vault collection +# and may not remain stable to outside uses. 
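Taken together, the adapter gives module and plugin code a single interface over very different option stores. A small illustration using the dict-backed form (these interfaces are internal to the collection and may change at any time, so this is for understanding only):

    from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import (
        HashiVaultOptionAdapter,
    )

    params = {'url': 'https://vault.example.com:8200', 'token': None}
    adapter = HashiVaultOptionAdapter.from_dict(params)

    adapter.has_option('namespace')                    # False: the key is absent
    adapter.set_option_default('namespace', 'team-a')  # added because it was missing
    adapter.get_filled_options('url', 'token', 'namespace')
    # -> {'url': 'https://vault.example.com:8200', 'namespace': 'team-a'}  (None values dropped)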
Changes may be made in ANY release, even a bugfix release. +# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.module_utils.basic import AnsibleModule + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultHelper, + HashiVaultOptionAdapter, +) +from ansible_collections.community.hashi_vault.plugins.module_utils._connection_options import HashiVaultConnectionOptions +from ansible_collections.community.hashi_vault.plugins.module_utils._authenticator import HashiVaultAuthenticator + + +class HashiVaultModule(AnsibleModule): + def __init__(self, *args, **kwargs): + if 'hashi_vault_custom_retry_callback' in kwargs: + callback = kwargs.pop('hashi_vault_custom_retry_callback') + else: + callback = self._generate_retry_callback + + super(HashiVaultModule, self).__init__(*args, **kwargs) + + self.helper = HashiVaultHelper() + self.adapter = HashiVaultOptionAdapter.from_dict(self.params) + self.connection_options = HashiVaultConnectionOptions(option_adapter=self.adapter, retry_callback_generator=callback) + self.authenticator = HashiVaultAuthenticator(option_adapter=self.adapter, warning_callback=self.warn, deprecate_callback=self.deprecate) + + @classmethod + def generate_argspec(cls, **kwargs): + spec = HashiVaultConnectionOptions.ARGSPEC.copy() + spec.update(HashiVaultAuthenticator.ARGSPEC.copy()) + spec.update(**kwargs) + + return spec + + def _generate_retry_callback(self, retry_action): + '''returns a Retry callback function for modules''' + def _on_retry(retry_obj): + if retry_obj.total > 0: + if retry_action == 'warn': + self.warn('community.hashi_vault: %i %s remaining.' % (retry_obj.total, 'retry' if retry_obj.total == 1 else 'retries')) + else: + pass + + return _on_retry diff --git a/ansible_collections/community/hashi_vault/plugins/modules/vault_kv1_get.py b/ansible_collections/community/hashi_vault/plugins/modules/vault_kv1_get.py new file mode 100644 index 000000000..e21f4a813 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/modules/vault_kv1_get.py @@ -0,0 +1,197 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = r''' +module: vault_kv1_get +version_added: 2.5.0 +author: + - Brian Scholer (@briantist) +short_description: Get a secret from HashiCorp Vault's KV version 1 secret store +requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). +description: + - Gets a secret from HashiCorp Vault's KV version 1 secret store. +seealso: + - ref: community.hashi_vault.vault_kv1_get lookup <ansible_collections.community.hashi_vault.vault_kv1_get_lookup> + description: The official documentation for the C(community.hashi_vault.vault_kv1_get) lookup plugin. + - module: community.hashi_vault.vault_kv2_get + - name: KV1 Secrets Engine + description: Documentation for the Vault KV secrets engine, version 1. 
+ link: https://www.vaultproject.io/docs/secrets/kv/kv-v1 +extends_documentation_fragment: + - community.hashi_vault.attributes + - community.hashi_vault.attributes.action_group + - community.hashi_vault.attributes.check_mode_read_only + - community.hashi_vault.connection + - community.hashi_vault.auth + - community.hashi_vault.engine_mount +options: + engine_mount_point: + default: kv + path: + description: + - Vault KV path to be read. + - This is relative to the I(engine_mount_point), so the mount path should not be included. + type: str + required: True +''' + +EXAMPLES = r''' +- name: Read a kv1 secret from Vault via the remote host with userpass auth + community.hashi_vault.vault_kv1_get: + url: https://vault:8201 + path: hello + auth_method: userpass + username: user + password: '{{ passwd }}' + register: response + # equivalent API path is kv/hello + +- name: Display the results + ansible.builtin.debug: + msg: + - "Secret: {{ response.secret }}" + - "Data: {{ response.data }} (same as secret in kv1)" + - "Metadata: {{ response.metadata }} (response info in kv1)" + - "Full response: {{ response.raw }}" + - "Value of key 'password' in the secret: {{ response.secret.password }}" + +- name: Read a secret from kv1 with a different mount via the remote host + community.hashi_vault.vault_kv1_get: + url: https://vault:8201 + engine_mount_point: custom/kv1/mount + path: hello + register: response + # equivalent API path is custom/kv1/mount/hello + +- name: Display the results + ansible.builtin.debug: + msg: + - "Secret: {{ response.secret }}" + - "Data: {{ response.data }} (same as secret in kv1)" + - "Metadata: {{ response.metadata }} (response info in kv1)" + - "Full response: {{ response.raw }}" +''' + +RETURN = r''' +raw: + description: The raw result of the read against the given path. + returned: success + type: dict + sample: + auth: null + data: + Key1: value1 + Key2: value2 + lease_duration: 2764800 + lease_id: "" + renewable: false + request_id: e99f145f-f02a-7073-1229-e3f191057a70 + warnings: null + wrap_info: null +data: + description: The C(data) field of raw result. This can also be accessed via C(raw.data). + returned: success + type: dict + sample: + Key1: value1 + Key2: value2 +secret: + description: The C(data) field of the raw result. This is identical to C(data) in the return values. + returned: success + type: dict + sample: + Key1: value1 + Key2: value2 +metadata: + description: This is a synthetic result. It is the same as C(raw) with C(data) removed. 
+ returned: success + type: dict + sample: + auth: null + lease_duration: 2764800 + lease_id: "" + renewable: false + request_id: e99f145f-f02a-7073-1229-e3f191057a70 + warnings: null + wrap_info: null +''' + +import traceback + +from ansible.module_utils._text import to_native +from ansible.module_utils.basic import missing_required_lib + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_module import HashiVaultModule +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + +try: + import hvac +except ImportError: + HAS_HVAC = False + HVAC_IMPORT_ERROR = traceback.format_exc() +else: + HVAC_IMPORT_ERROR = None + HAS_HVAC = True + + +def run_module(): + argspec = HashiVaultModule.generate_argspec( + engine_mount_point=dict(type='str', default='kv'), + path=dict(type='str', required=True), + ) + + module = HashiVaultModule( + argument_spec=argspec, + supports_check_mode=True + ) + + if not HAS_HVAC: + module.fail_json( + msg=missing_required_lib('hvac'), + exception=HVAC_IMPORT_ERROR + ) + + engine_mount_point = module.params.get('engine_mount_point') + path = module.params.get('path') + + module.connection_options.process_connection_options() + client_args = module.connection_options.get_hvac_connection_options() + client = module.helper.get_vault_client(**client_args) + + try: + module.authenticator.validate() + module.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + module.fail_json(msg=to_native(e), exception=traceback.format_exc()) + + try: + raw = client.secrets.kv.v1.read_secret(path=path, mount_point=engine_mount_point) + except hvac.exceptions.Forbidden as e: + module.fail_json(msg="Forbidden: Permission Denied to path ['%s']." % path, exception=traceback.format_exc()) + except hvac.exceptions.InvalidPath as e: + if 'Invalid path for a versioned K/V secrets engine' in to_native(e): + msg = "Invalid path for a versioned K/V secrets engine ['%s']. If this is a KV version 2 path, use community.hashi_vault.vault_kv2_get." + else: + msg = "Invalid or missing path ['%s']." 
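+ # either way, fail with the chosen message, interpolating the requested path (descriptive comment; behaviour shown by the fail_json call below)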
+ + module.fail_json(msg=msg % (path,), exception=traceback.format_exc()) + + metadata = raw.copy() + data = metadata.pop('data') + module.exit_json(raw=raw, data=data, secret=data, metadata=metadata) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/plugins/modules/vault_kv2_delete.py b/ansible_collections/community/hashi_vault/plugins/modules/vault_kv2_delete.py new file mode 100644 index 000000000..3145e4a5d --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/modules/vault_kv2_delete.py @@ -0,0 +1,180 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# (c) 2022, Isaac Wagner (@idwagner) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = ''' +module: vault_kv2_delete +version_added: 3.4.0 +author: + - Isaac Wagner (@idwagner) +short_description: Delete one or more versions of a secret from HashiCorp Vault's KV version 2 secret store +requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). +description: + - Delete one or more versions of a secret from HashiCorp Vault's KV version 2 secret store. +notes: + - This module always reports C(changed) status because it cannot guarantee idempotence. + - Use C(changed_when) to control that in cases where the operation is known to not change state. +attributes: + check_mode: + support: partial + details: + - In check mode, the module returns C(changed) status without contacting Vault. + - Consider using M(community.hashi_vault.vault_kv2_get) to verify the existence of the secret first. +seealso: + - module: community.hashi_vault.vault_kv2_get + - module: community.hashi_vault.vault_kv2_write + - name: KV2 Secrets Engine + description: Documentation for the Vault KV secrets engine, version 2. + link: https://www.vaultproject.io/docs/secrets/kv/kv-v2 +extends_documentation_fragment: + - community.hashi_vault.attributes + - community.hashi_vault.attributes.action_group + - community.hashi_vault.connection + - community.hashi_vault.auth + - community.hashi_vault.engine_mount +options: + engine_mount_point: + default: secret + path: + description: + - Vault KV path to be deleted. + - This is relative to the I(engine_mount_point), so the mount path should not be included. + - For kv2, do not include C(/data/) or C(/metadata/). + type: str + required: True + versions: + description: + - One or more versions of the secret to delete. + - When omitted, the latest version of the secret is deleted. + type: list + elements: int + required: False +''' + +EXAMPLES = """ +- name: Delete the latest version of the secret/mysecret secret. + community.hashi_vault.vault_kv2_delete: + url: https://vault:8201 + path: secret/mysecret + auth_method: userpass + username: user + password: '{{ passwd }}' + register: result + +- name: Delete versions 1 and 3 of the secret/mysecret secret. + community.hashi_vault.vault_kv2_delete: + url: https://vault:8201 + path: secret/mysecret + versions: [1, 3] + auth_method: userpass + username: user + password: '{{ passwd }}' +""" + +RETURN = """ +data: + description: + - The raw result of the delete against the given path. 
+ - This is usually empty, but may contain warnings or other information. + returned: success + type: dict +""" + +import traceback + +from ansible.module_utils._text import to_native +from ansible.module_utils.basic import missing_required_lib + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_module import HashiVaultModule +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + +try: + import hvac +except ImportError: + HAS_HVAC = False + HVAC_IMPORT_ERROR = traceback.format_exc() +else: + HVAC_IMPORT_ERROR = None + HAS_HVAC = True + + +def run_module(): + + argspec = HashiVaultModule.generate_argspec( + engine_mount_point=dict(type='str', default='secret'), + path=dict(type='str', required=True), + versions=dict(type='list', elements='int', required=False) + ) + + module = HashiVaultModule( + argument_spec=argspec, + supports_check_mode=True + ) + + if not HAS_HVAC: + module.fail_json( + msg=missing_required_lib('hvac'), + exception=HVAC_IMPORT_ERROR + ) + + engine_mount_point = module.params.get('engine_mount_point') + path = module.params.get('path') + versions = module.params.get('versions') + + module.connection_options.process_connection_options() + client_args = module.connection_options.get_hvac_connection_options() + client = module.helper.get_vault_client(**client_args) + + try: + module.authenticator.validate() + module.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + module.fail_json(msg=to_native(e), exception=traceback.format_exc()) + + try: + # Vault has two separate methods, one for delete latest version, + # and delete specific versions. + if module.check_mode: + response = {} + elif not versions: + response = client.secrets.kv.v2.delete_latest_version_of_secret( + path=path, mount_point=engine_mount_point) + else: + response = client.secrets.kv.v2.delete_secret_versions( + path=path, versions=versions, mount_point=engine_mount_point) + + except hvac.exceptions.Forbidden as e: + module.fail_json(msg="Forbidden: Permission Denied to path ['%s']." % path, exception=traceback.format_exc()) + + # https://github.com/hvac/hvac/issues/797 + # HVAC returns a raw response object when the body is not JSON. + # That includes 204 responses, which are successful with no body. + # So we will try to detect that and a act accordingly. + # A better way may be to implement our own adapter for this + # collection, but it's a little premature to do that. + if hasattr(response, 'json') and callable(response.json): + if response.status_code == 204: + output = {} + else: + module.warn( + 'Vault returned status code %i and an unparsable body.' 
% response.status_code) + output = response.content + else: + output = response + + module.exit_json(changed=True, data=output) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/plugins/modules/vault_kv2_get.py b/ansible_collections/community/hashi_vault/plugins/modules/vault_kv2_get.py new file mode 100644 index 000000000..04a549d59 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/modules/vault_kv2_get.py @@ -0,0 +1,213 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = r''' +module: vault_kv2_get +version_added: 2.5.0 +author: + - Brian Scholer (@briantist) +short_description: Get a secret from HashiCorp Vault's KV version 2 secret store +requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). +description: + - Gets a secret from HashiCorp Vault's KV version 2 secret store. +seealso: + - ref: community.hashi_vault.vault_kv2_get lookup <ansible_collections.community.hashi_vault.vault_kv2_get_lookup> + description: The official documentation for the C(community.hashi_vault.vault_kv2_get) lookup plugin. + - module: community.hashi_vault.vault_kv1_get + - module: community.hashi_vault.vault_kv2_write + - name: KV2 Secrets Engine + description: Documentation for the Vault KV secrets engine, version 2. + link: https://www.vaultproject.io/docs/secrets/kv/kv-v2 +extends_documentation_fragment: + - community.hashi_vault.attributes + - community.hashi_vault.attributes.action_group + - community.hashi_vault.attributes.check_mode_read_only + - community.hashi_vault.connection + - community.hashi_vault.auth + - community.hashi_vault.engine_mount +options: + engine_mount_point: + default: secret + path: + description: + - Vault KV path to be read. + - This is relative to the I(engine_mount_point), so the mount path should not be included. + - For kv2, do not include C(/data/) or C(/metadata/). + type: str + required: True + version: + description: Specifies the version to return. If not set the latest version is returned. 
+ type: int +''' + +EXAMPLES = r''' +- name: Read the latest version of a kv2 secret from Vault via the remote host with userpass auth + community.hashi_vault.vault_kv2_get: + url: https://vault:8201 + path: hello + auth_method: userpass + username: user + password: '{{ passwd }}' + register: response + # equivalent API path is secret/data/hello + +- name: Display the results + ansible.builtin.debug: + msg: + - "Secret: {{ response.secret }}" + - "Data: {{ response.data }} (contains secret data & metadata in kv2)" + - "Metadata: {{ response.metadata }}" + - "Full response: {{ response.raw }}" + - "Value of key 'password' in the secret: {{ response.secret.password }}" + +- name: Read version 5 of a secret from kv2 with a different mount via the remote host + community.hashi_vault.vault_kv2_get: + url: https://vault:8201 + engine_mount_point: custom/kv2/mount + path: hello + version: 5 + register: response + # equivalent API path is custom/kv2/mount/data/hello + +- name: Assert that the version returned is as expected + ansible.builtin.assert: + that: + - response.metadata.version == 5 +''' + +RETURN = r''' +raw: + description: The raw result of the read against the given path. + returned: success + type: dict + sample: + auth: null + data: + data: + Key1: value1 + Key2: value2 + metadata: + created_time: "2022-04-21T15:56:58.8525402Z" + custom_metadata: null + deletion_time: "" + destroyed: false + version: 2 + lease_duration: 0 + lease_id: "" + renewable: false + request_id: dc829675-9119-e831-ae74-35fc5d33d200 + warnings: null + wrap_info: null +data: + description: The C(data) field of raw result. This can also be accessed via C(raw.data). + returned: success + type: dict + sample: + data: + Key1: value1 + Key2: value2 + metadata: + created_time: "2022-04-21T15:56:58.8525402Z" + custom_metadata: null + deletion_time: "" + destroyed: false + version: 2 +secret: + description: The C(data) field within the C(data) field. Equivalent to C(raw.data.data). + returned: success + type: dict + sample: + Key1: value1 + Key2: value2 +metadata: + description: The C(metadata) field within the C(data) field. Equivalent to C(raw.data.metadata). 
+ returned: success + type: dict + sample: + created_time: "2022-04-21T15:56:58.8525402Z" + custom_metadata: null + deletion_time: "" + destroyed: false + version: 2 +''' + +import traceback + +from ansible.module_utils._text import to_native +from ansible.module_utils.basic import missing_required_lib + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_module import HashiVaultModule +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + +try: + import hvac +except ImportError: + HAS_HVAC = False + HVAC_IMPORT_ERROR = traceback.format_exc() +else: + HVAC_IMPORT_ERROR = None + HAS_HVAC = True + + +def run_module(): + argspec = HashiVaultModule.generate_argspec( + engine_mount_point=dict(type='str', default='secret'), + path=dict(type='str', required=True), + version=dict(type='int'), + ) + + module = HashiVaultModule( + argument_spec=argspec, + supports_check_mode=True + ) + + if not HAS_HVAC: + module.fail_json( + msg=missing_required_lib('hvac'), + exception=HVAC_IMPORT_ERROR + ) + + engine_mount_point = module.params.get('engine_mount_point') + path = module.params.get('path') + version = module.params.get('version') + + module.connection_options.process_connection_options() + client_args = module.connection_options.get_hvac_connection_options() + client = module.helper.get_vault_client(**client_args) + + try: + module.authenticator.validate() + module.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + module.fail_json(msg=to_native(e), exception=traceback.format_exc()) + + try: + raw = client.secrets.kv.v2.read_secret_version(path=path, version=version, mount_point=engine_mount_point) + except hvac.exceptions.Forbidden as e: + module.fail_json(msg="Forbidden: Permission Denied to path ['%s']." % path, exception=traceback.format_exc()) + except hvac.exceptions.InvalidPath as e: + module.fail_json( + msg="Invalid or missing path ['%s'] with secret version '%s'. Check the path or secret version." % (path, version or 'latest'), + exception=traceback.format_exc() + ) + + data = raw['data'] + metadata = data['metadata'] + secret = data['data'] + module.exit_json(raw=raw, data=data, secret=secret, metadata=metadata) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/plugins/modules/vault_kv2_write.py b/ansible_collections/community/hashi_vault/plugins/modules/vault_kv2_write.py new file mode 100644 index 000000000..d226987c6 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/modules/vault_kv2_write.py @@ -0,0 +1,278 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# (c) 2023, Devon Mar (@devon-mar) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +module: vault_kv2_write +version_added: 4.2.0 +author: + - Devon Mar (@devon-mar) +short_description: Perform a write operation against a KVv2 secret in HashiCorp Vault +description: + - Perform a write operation against a KVv2 secret in HashiCorp Vault. +requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). 
+seealso: + - module: community.hashi_vault.vault_write + - module: community.hashi_vault.vault_kv2_get + - module: community.hashi_vault.vault_kv2_delete + - ref: community.hashi_vault.vault_write lookup <ansible_collections.community.hashi_vault.vault_write_lookup> + description: The official documentation for the C(community.hashi_vault.vault_write) lookup plugin. + - name: KV2 Secrets Engine + description: Documentation for the Vault KV secrets engine, version 2. + link: https://www.vaultproject.io/docs/secrets/kv/kv-v2 +extends_documentation_fragment: + - community.hashi_vault.attributes + - community.hashi_vault.attributes.action_group + - community.hashi_vault.connection + - community.hashi_vault.auth + - community.hashi_vault.engine_mount +attributes: + check_mode: + support: partial + details: + - If I(read_before_write) is C(true), full check mode functionality is supported. + - If I(read_before_write) is C(false), the status will always be C(changed) but a write will not be performed in check mode. +options: + engine_mount_point: + type: str + default: secret + path: + type: str + required: true + description: + - Vault KVv2 path to be written to. + - This is relative to the I(engine_mount_point), so the mount path should not be included. + data: + type: dict + required: true + description: + - KVv2 secret data to write. + cas: + type: int + description: + - Perform a check-and-set operation. + read_before_write: + type: bool + default: false + description: + - Read the secret first and write only when I(data) differs from the read data. + - Requires C(read) permission on the secret if C(true). + - If C(false), this module will always write to I(path) when not in check mode. +""" + +EXAMPLES = r""" +- name: Write/create a secret + community.hashi_vault.vault_kv2_write: + url: https://vault:8200 + path: hello + data: + foo: bar + +- name: Create a secret with CAS (the secret must not exist) + community.hashi_vault.vault_kv2_write: + url: https://vault:8200 + path: caspath + cas: 0 + data: + foo: bar + +- name: Update a secret with CAS + community.hashi_vault.vault_kv2_write: + url: https://vault:8200 + path: caspath + cas: 2 + data: + hello: world + +# This module does not have patch capability built in. +# Patching can be achieved with multiple tasks. + +- name: Retrieve current secret + register: current + community.hashi_vault.vault_kv2_get: + url: https://vault:8200 + path: hello + +## patch without CAS +- name: Update the secret + vars: + values_to_update: + foo: baz + hello: goodbye + community.hashi_vault.vault_kv2_write: + url: https://vault:8200 + path: hello + data: >- + {{ + current.secret + | combine(values_to_update) + }} + +## patch with CAS +- name: Update the secret + vars: + values_to_update: + foo: baz + hello: goodbye + community.hashi_vault.vault_kv2_write: + url: https://vault:8200 + path: hello + cas: '{{ current.metadata.version | int }}' + data: >- + {{ + current.secret + | combine(values_to_update) + }} +""" + +RETURN = r""" +raw: + type: dict + description: The raw Vault response. 
+ returned: changed + sample: + auth: + data: + created_time: "2023-02-21T19:51:50.801757862Z" + custom_metadata: + deletion_time: "" + destroyed: false + version: 1 + lease_duration: 0 + lease_id: "" + renewable: false + request_id: 52eb1aa7-5a38-9a02-9246-efc5bf9581ec + warnings: null + wrap_info: null +""" + +import traceback + +from ansible.module_utils._text import to_native +from ansible.module_utils.basic import missing_required_lib +from ..module_utils._hashi_vault_common import ( + HashiVaultValueError, +) +from ..module_utils._hashi_vault_module import ( + HashiVaultModule, +) + +try: + import hvac +except ImportError: + HAS_HVAC = False + HVAC_IMPORT_ERROR = traceback.format_exc() +else: + HVAC_IMPORT_ERROR = None + HAS_HVAC = True + + +def run_module(): + argspec = HashiVaultModule.generate_argspec( + engine_mount_point=dict(type="str", default="secret"), + path=dict(type="str", required=True), + data=dict(type="dict", required=True, no_log=True), + cas=dict(type="int"), + read_before_write=dict(type="bool", default=False), + ) + + module = HashiVaultModule( + argument_spec=argspec, + supports_check_mode=True, + ) + + if not HAS_HVAC: + module.fail_json( + msg=missing_required_lib("hvac"), + exception=HVAC_IMPORT_ERROR, + ) + + mount_point = module.params.get("engine_mount_point") + path = module.params.get("path") + cas = module.params.get("cas") + data = module.params.get("data") + read_before_write = module.params.get("read_before_write") + + module.connection_options.process_connection_options() + client_args = module.connection_options.get_hvac_connection_options() + client = module.helper.get_vault_client(**client_args) + + try: + module.authenticator.validate() + module.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + module.fail_json(msg=to_native(e), exception=traceback.format_exc()) + + if read_before_write is True: + try: + response = client.secrets.kv.v2.read_secret_version( + path=path, mount_point=mount_point + ) + if "data" not in response or "data" not in response["data"]: + module.fail_json( + msg="Vault response did not contain data: %s" % response + ) + current_data = response["data"]["data"] + except hvac.exceptions.InvalidPath: + current_data = {} + except hvac.exceptions.Forbidden: + module.fail_json( + msg="Permission denied reading %s" % path, + exception=traceback.format_exc(), + ) + except hvac.exceptions.VaultError: + module.fail_json( + msg="VaultError reading %s" % path, + exception=traceback.format_exc(), + ) + else: + current_data = {} + + changed = current_data != data + + if changed is True and module.check_mode is False: + args = { + "path": path, + "secret": data, + "mount_point": mount_point, + } + if cas is not None: + args["cas"] = cas + + try: + raw = client.secrets.kv.v2.create_or_update_secret(**args) + except hvac.exceptions.InvalidRequest: + module.fail_json( + msg="InvalidRequest writing to '%s'" % path, + exception=traceback.format_exc(), + ) + except hvac.exceptions.InvalidPath: + module.fail_json( + msg="InvalidPath writing to '%s'" % path, + exception=traceback.format_exc(), + ) + except hvac.exceptions.Forbidden: + module.fail_json( + msg="Permission denied writing to '%s'" % path, + exception=traceback.format_exc(), + ) + + module.exit_json(changed=True, raw=raw) + + module.exit_json(changed=changed) + + +def main(): + run_module() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/community/hashi_vault/plugins/modules/vault_list.py 
b/ansible_collections/community/hashi_vault/plugins/modules/vault_list.py new file mode 100644 index 000000000..a0823dc2d --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/modules/vault_list.py @@ -0,0 +1,134 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# (c) 2023, Tom Kivlin (@tomkivlin) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + module: vault_list + version_added: 4.1.0 + author: + - Tom Kivlin (@tomkivlin) + short_description: Perform a list operation against HashiCorp Vault + requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). + description: + - Performs a generic list operation against a given path in HashiCorp Vault. + seealso: + - ref: community.hashi_vault.vault_list lookup <ansible_collections.community.hashi_vault.vault_list_lookup> + description: The official documentation for the C(community.hashi_vault.vault_list) lookup plugin. + extends_documentation_fragment: + - community.hashi_vault.attributes + - community.hashi_vault.attributes.action_group + - community.hashi_vault.attributes.check_mode_read_only + - community.hashi_vault.connection + - community.hashi_vault.auth + options: + path: + description: Vault path to be listed. + type: str + required: true +""" + +EXAMPLES = """ +- name: List kv2 secrets from Vault via the remote host with userpass auth + community.hashi_vault.vault_list: + url: https://vault:8201 + path: secret/metadata + # For kv2, the path needs to follow the pattern 'mount_point/metadata' or 'mount_point/metadata/path' to list all secrets in that path + auth_method: userpass + username: user + password: '{{ passwd }}' + register: secret + +- name: Display the secrets found at the path provided above + ansible.builtin.debug: + msg: "{{ secret.data.data['keys'] }}" + # Note that secret.data.data.keys won't work as 'keys' is a built-in method + +- name: List access policies from Vault via the remote host + community.hashi_vault.vault_list: + url: https://vault:8201 + path: sys/policies/acl + register: policies + +- name: Display the policy names + ansible.builtin.debug: + msg: "{{ policies.data.data['keys'] }}" + # Note that secret.data.data.keys won't work as 'keys' is a built-in method +""" + +RETURN = """ +data: + description: The raw result of the list against the given path. 
+ returned: success + type: dict +""" + +import traceback + +from ansible.module_utils._text import to_native +from ansible.module_utils.basic import missing_required_lib + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_module import HashiVaultModule +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + +try: + import hvac +except ImportError: + HAS_HVAC = False + HVAC_IMPORT_ERROR = traceback.format_exc() +else: + HVAC_IMPORT_ERROR = None + HAS_HVAC = True + + +def run_module(): + argspec = HashiVaultModule.generate_argspec( + path=dict(type='str', required=True), + ) + + module = HashiVaultModule( + argument_spec=argspec, + supports_check_mode=True + ) + + if not HAS_HVAC: + module.fail_json( + msg=missing_required_lib('hvac'), + exception=HVAC_IMPORT_ERROR + ) + + path = module.params.get('path') + + module.connection_options.process_connection_options() + client_args = module.connection_options.get_hvac_connection_options() + client = module.helper.get_vault_client(**client_args) + + try: + module.authenticator.validate() + module.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + module.fail_json(msg=to_native(e), exception=traceback.format_exc()) + + try: + data = client.list(path) + except hvac.exceptions.Forbidden as e: + module.fail_json(msg="Forbidden: Permission Denied to path '%s'." % path, exception=traceback.format_exc()) + + if data is None: + module.fail_json(msg="The path '%s' doesn't seem to exist." % path) + + module.exit_json(data=data) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/plugins/modules/vault_login.py b/ansible_collections/community/hashi_vault/plugins/modules/vault_login.py new file mode 100644 index 000000000..fe0408da2 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/modules/vault_login.py @@ -0,0 +1,177 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# (c) 2021, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + module: vault_login + version_added: 2.2.0 + author: + - Brian Scholer (@briantist) + short_description: Perform a login operation against HashiCorp Vault + requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). + description: + - Performs a login operation against a given path in HashiCorp Vault, returning the login response, including the token. + seealso: + - ref: community.hashi_vault.vault_login lookup <ansible_collections.community.hashi_vault.vault_login_lookup> + description: The official documentation for the C(community.hashi_vault.vault_login) lookup plugin. + - ref: community.hashi_vault.vault_login_token filter <ansible_collections.community.hashi_vault.vault_login_token_filter> + description: The official documentation for the C(community.hashi_vault.vault_login_token) filter plugin. 
+ extends_documentation_fragment: + - community.hashi_vault.attributes + - community.hashi_vault.attributes.action_group + - community.hashi_vault.connection + - community.hashi_vault.auth + notes: + - "A login is a write operation (creating a token persisted to storage), so this module always reports C(changed=True), + except when used with C(token) auth, because no new token is created in that case. For the purposes of Ansible playbooks however, + it may be more useful to set C(changed_when=false) if you're doing idempotency checks against the target system." + - The C(none) auth method is not valid for this module because there is no response to return. + - "With C(token) auth, no actual login is performed. + Instead, the given token's additional information is returned in a structure that resembles what login responses look like." + - "The C(token) auth method will only return full information if I(token_validate=True). + If the token does not have the C(lookup-self) capability, this will fail. If I(token_validate=False), only the token value itself + will be returned in the structure." + attributes: + check_mode: + support: partial + details: + - In check mode, this module will not perform a login, and will instead return a basic structure with an empty token. + However this may not be useful if the token is required for follow on tasks. + - It may be better to use this module with C(check_mode=false) in order to have a valid token that can be used. + options: + token_validate: + default: true +""" + +EXAMPLES = """ +- name: Login and use the resulting token + community.hashi_vault.vault_login: + url: https://vault:8201 + auth_method: userpass + username: user + password: '{{ passwd }}' + register: login_data + +- name: Retrieve an approle role ID (token via filter) + community.hashi_vault.vault_read: + url: https://vault:8201 + auth_method: token + token: '{{ login_data | community.hashi_vault.vault_login_token }}' + path: auth/approle/role/role-name/role-id + register: approle_id + +- name: Retrieve an approle role ID (token via direct dict access) + community.hashi_vault.vault_read: + url: https://vault:8201 + auth_method: token + token: '{{ login_data.login.auth.client_token }}' + path: auth/approle/role/role-name/role-id + register: approle_id +""" + +RETURN = """ +login: + description: The result of the login against the given auth method. + returned: success + type: dict + contains: + auth: + description: The C(auth) member of the login response. + returned: success + type: dict + contains: + client_token: + description: Contains the token provided by the login operation (or the input token when I(auth_method=token)). + returned: success + type: str + data: + description: The C(data) member of the login response. + returned: success, when available + type: dict +""" + +import traceback + +from ansible.module_utils._text import to_native +from ansible.module_utils.basic import missing_required_lib + +from ...plugins.module_utils._hashi_vault_module import HashiVaultModule +from ...plugins.module_utils._hashi_vault_common import HashiVaultValueError + +# we don't actually need to import hvac directly in this module +# because all of the hvac calls happen in module utils, but +# we would like to control the error message here for consistency. 
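+# the try/except below only records whether the import succeeded, so that
+# missing_required_lib() can report a consistent error from run_module()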
+try: + import hvac # pylint: disable=unused-import +except ImportError: + HAS_HVAC = False + HVAC_IMPORT_ERROR = traceback.format_exc() +else: + HVAC_IMPORT_ERROR = None + HAS_HVAC = True + + +def run_module(): + argspec = HashiVaultModule.generate_argspec( + # we override this from the shared argspec in order to turn off no_log + # otherwise we would not be able to return the input token value + token=dict(type='str', no_log=False, default=None), + + # we override this from the shared argspec because the default for + # this module should be True, which differs from the rest of the + # collection since 4.0.0. + token_validate=dict(type='bool', default=True) + ) + + module = HashiVaultModule( + argument_spec=argspec, + supports_check_mode=True + ) + + if not HAS_HVAC: + module.fail_json( + msg=missing_required_lib('hvac'), + exception=HVAC_IMPORT_ERROR + ) + + # a login is technically a write operation, using storage and resources + changed = True + auth_method = module.params.get('auth_method') + + if auth_method == 'none': + module.fail_json(msg="The 'none' auth method is not valid for this module.") + + if auth_method == 'token': + # with the token auth method, we don't actually perform a login operation + # nor change the state of Vault; it's read-only (to lookup the token's info) + changed = False + + module.connection_options.process_connection_options() + client_args = module.connection_options.get_hvac_connection_options() + client = module.helper.get_vault_client(**client_args) + + try: + module.authenticator.validate() + if module.check_mode: + response = {'auth': {'client_token': None}} + else: + response = module.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + module.fail_json(msg=to_native(e), exception=traceback.format_exc()) + + module.exit_json(changed=changed, login=response) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/plugins/modules/vault_pki_generate_certificate.py b/ansible_collections/community/hashi_vault/plugins/modules/vault_pki_generate_certificate.py new file mode 100644 index 000000000..66b9190b4 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/modules/vault_pki_generate_certificate.py @@ -0,0 +1,296 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# (c) 2022, Florent David (@Ripolin) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + module: vault_pki_generate_certificate + version_added: 2.3.0 + author: + - Florent David (@Ripolin) + short_description: Generates a new set of credentials (private key and certificate) using HashiCorp Vault PKI + requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/changelog.html#may-25th-2019)) version C(0.9.1) or higher + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). + description: + - Generates a new set of credentials (private key and certificate) based on a Vault PKI role. + seealso: + - name: HashiCorp Vault PKI Secrets Engine API + description: API documentation for the HashiCorp Vault PKI secrets engine. 
+ link: https://www.vaultproject.io/api/secret/pki#generate-certificate + - name: HVAC library reference + description: HVAC library reference about the PKI engine. + link: https://hvac.readthedocs.io/en/stable/usage/secrets_engines/pki.html#generate-certificate + extends_documentation_fragment: + - community.hashi_vault.attributes + - community.hashi_vault.attributes.action_group + - community.hashi_vault.connection + - community.hashi_vault.auth + - community.hashi_vault.engine_mount + attributes: + check_mode: + support: partial + details: + - In check mode, this module will not contact Vault and will return an empty C(data) field and C(changed) status. + options: + alt_names: + description: + - Specifies requested Subject Alternative Names. + - These can be host names or email addresses; they will be parsed into their respective fields. + - If any requested names do not match role policy, the entire request will be denied. + type: list + elements: str + default: [] + common_name: + description: + - Specifies the requested CN for the certificate. + - If the CN is allowed by role policy, it will be issued. + type: str + required: true + exclude_cn_from_sans: + description: + - If true, the given I(common_name) will not be included in DNS or Email Subject Alternate Names (as appropriate). + - Useful if the CN is not a hostname or email address, but is instead some human-readable identifier. + type: bool + default: False + format: + description: + - Specifies the format for returned data. + - Can be C(pem), C(der), or C(pem_bundle). + - If C(der), the output is base64 encoded. + - >- + If C(pem_bundle), the C(certificate) field will contain the private key and certificate, concatenated. If the issuing CA is not a Vault-derived + self-signed root, this will be included as well. + type: str + choices: [pem, der, pem_bundle] + default: pem + ip_sans: + description: + - Specifies requested IP Subject Alternative Names. + - Only valid if the role allows IP SANs (which is the default). + type: list + elements: str + default: [] + role_name: + description: + - Specifies the name of the role to create the certificate against. + type: str + required: true + other_sans: + description: + - Specifies custom OID/UTF8-string SANs. + - These must match values specified on the role in C(allowed_other_sans). + - "The format is the same as OpenSSL: C(<oid>;<type>:<value>) where the only current valid type is C(UTF8)." + type: list + elements: str + default: [] + engine_mount_point: + description: + - Specify the mount point used by the PKI engine. + - Defaults to the default used by C(hvac). + private_key_format: + description: + - Specifies the format for marshaling the private key. + - Defaults to C(der) which will return either base64-encoded DER or PEM-encoded DER, depending on the value of I(format). + - The other option is C(pkcs8) which will return the key marshalled as PEM-encoded PKCS8. + type: str + choices: [der, pkcs8] + default: der + ttl: + description: + - Specifies requested Time To Live. + - Cannot be greater than the role's C(max_ttl) value. + - If not provided, the role's C(ttl) value will be used. + - Note that the role values default to system values if not explicitly set. + type: str + uri_sans: + description: + - Specifies the requested URI Subject Alternative Names. 
+ type: list + elements: str + default: [] +""" + +EXAMPLES = """ +- name: Login and use the resulting token + community.hashi_vault.vault_login: + url: https://localhost:8200 + auth_method: ldap + username: "john.doe" + password: "{{ user_passwd }}" + register: login_data + +- name: Generate a certificate with an existing token + community.hashi_vault.vault_pki_generate_certificate: + role_name: test.example.org + common_name: test.example.org + ttl: 8760h + alt_names: + - test2.example.org + - test3.example.org + url: https://vault:8201 + auth_method: token + token: "{{ login_data.login.auth.client_token }}" + register: cert_data + +- name: Display generated certificate + debug: + msg: "{{ cert_data.data.data.certificate }}" +""" + +RETURN = """ +data: + description: Information about newly generated certificate + returned: success + type: complex + contains: + lease_id: + description: Vault lease attached to certificate. + returned: success + type: str + sample: pki/issue/test/7ad6cfa5-f04f-c62a-d477-f33210475d05 + renewable: + description: True if certificate is renewable. + returned: success + type: bool + sample: false + lease_duration: + description: Vault lease duration. + returned: success + type: int + sample: 21600 + data: + description: Payload + returned: success + type: complex + contains: + certificate: + description: Generated certificate. + returned: success + type: str + sample: "-----BEGIN CERTIFICATE-----...-----END CERTIFICATE-----" + issuing_ca: + description: CA certificate. + returned: success + type: str + sample: "-----BEGIN CERTIFICATE-----...-----END CERTIFICATE-----" + ca_chain: + description: Linked list of CA certificates. + returned: success + type: list + elements: str + sample: ["-----BEGIN CERTIFICATE-----...-----END CERTIFICATE-----"] + private_key: + description: Private key used to generate certificate. + returned: success + type: str + sample: "-----BEGIN RSA PRIVATE KEY-----...-----END RSA PRIVATE KEY-----" + private_key_type: + description: Private key algorithm. + returned: success + type: str + sample: rsa + serial_number: + description: Certificate's serial number. + returned: success + type: str + sample: 39:dd:2e:90:b7:23:1f:8d:d3:7d:31:c5:1b:da:84:d0:5b:65:31:58 + warning: + description: Warnings returned by Vault during generation. 
+ returned: success + type: str +""" + +import traceback + +from ansible.module_utils._text import to_native +from ansible.module_utils.basic import missing_required_lib + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_module import HashiVaultModule +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + +HAS_HVAC = False +try: + import hvac + from hvac.api.secrets_engines.pki import DEFAULT_MOUNT_POINT +except ImportError: + HVAC_IMPORT_ERROR = traceback.format_exc() + HAS_HVAC = False +else: + HVAC_IMPORT_ERROR = None + HAS_HVAC = True + + +def run_module(): + argspec = HashiVaultModule.generate_argspec( + role_name=dict(type='str', required=True), + common_name=dict(type='str', required=True), + alt_names=dict(type='list', elements='str', required=False, default=[]), + ip_sans=dict(type='list', elements='str', required=False, default=[]), + uri_sans=dict(type='list', elements='str', required=False, default=[]), + other_sans=dict(type='list', elements='str', required=False, default=[]), + ttl=dict(type='str', required=False, default=None), + format=dict(type='str', required=False, choices=['pem', 'der', 'pem_bundle'], default='pem'), + private_key_format=dict(type='str', required=False, choices=['der', 'pkcs8'], default='der'), + exclude_cn_from_sans=dict(type='bool', required=False, default=False), + engine_mount_point=dict(type='str', required=False) + ) + + module = HashiVaultModule( + argument_spec=argspec, + supports_check_mode=True + ) + + if not HAS_HVAC: + module.fail_json(msg=missing_required_lib('hvac'), exception=HVAC_IMPORT_ERROR) + + role_name = module.params.get('role_name') + common_name = module.params.get('common_name') + engine_mount_point = module.params.get('engine_mount_point') or DEFAULT_MOUNT_POINT + + extra_params = { + 'alt_names': ','.join(module.params.get('alt_names')), + 'ip_sans': ','.join(module.params.get('ip_sans')), + 'uri_sans': ','.join(module.params.get('uri_sans')), + 'other_sans': ','.join(module.params.get('other_sans')), + 'ttl': module.params.get('ttl'), + 'format': module.params.get('format'), + 'private_key_format': module.params.get('private_key_format'), + 'exclude_cn_from_sans': module.params.get('exclude_cn_from_sans') + } + + module.connection_options.process_connection_options() + client_args = module.connection_options.get_hvac_connection_options() + client = module.helper.get_vault_client(**client_args) + + try: + module.authenticator.validate() + module.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + module.fail_json(msg=to_native(e), exception=traceback.format_exc()) + + try: + if module.check_mode: + data = {} + else: + data = client.secrets.pki.generate_certificate( + name=role_name, common_name=common_name, + extra_params=extra_params, mount_point=engine_mount_point + ) + except hvac.exceptions.VaultError as e: + module.fail_json(msg=to_native(e), exception=traceback.format_exc()) + + # generate_certificate is a write operation which always return a new certificate + module.exit_json(changed=True, data=data) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/plugins/modules/vault_read.py b/ansible_collections/community/hashi_vault/plugins/modules/vault_read.py new file mode 100644 index 000000000..6b6b209d5 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/modules/vault_read.py @@ -0,0 +1,133 
@@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# (c) 2021, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + module: vault_read + version_added: 1.4.0 + author: + - Brian Scholer (@briantist) + short_description: Perform a read operation against HashiCorp Vault + requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). + description: + - Performs a generic read operation against a given path in HashiCorp Vault. + seealso: + - ref: community.hashi_vault.vault_read lookup <ansible_collections.community.hashi_vault.vault_read_lookup> + description: The official documentation for the C(community.hashi_vault.vault_read) lookup plugin. + - ref: community.hashi_vault.hashi_vault lookup <ansible_collections.community.hashi_vault.hashi_vault_lookup> + description: The official documentation for the C(community.hashi_vault.hashi_vault) lookup plugin. + extends_documentation_fragment: + - community.hashi_vault.attributes + - community.hashi_vault.attributes.action_group + - community.hashi_vault.attributes.check_mode_read_only + - community.hashi_vault.connection + - community.hashi_vault.auth + options: + path: + description: Vault path to be read. + type: str + required: True +""" + +EXAMPLES = """ +- name: Read a kv2 secret from Vault via the remote host with userpass auth + community.hashi_vault.vault_read: + url: https://vault:8201 + path: secret/data/hello + auth_method: userpass + username: user + password: '{{ passwd }}' + register: secret + +- name: Display the secret data + ansible.builtin.debug: + msg: "{{ secret.data.data.data }}" + +- name: Retrieve an approle role ID from Vault via the remote host + community.hashi_vault.vault_read: + url: https://vault:8201 + path: auth/approle/role/role-name/role-id + register: approle_id + +- name: Display the role ID + ansible.builtin.debug: + msg: "{{ approle_id.data.data.role_id }}" +""" + +RETURN = """ +data: + description: The raw result of the read against the given path. 
+ returned: success + type: dict +""" + +import traceback + +from ansible.module_utils._text import to_native +from ansible.module_utils.basic import missing_required_lib + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_module import HashiVaultModule +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + +try: + import hvac +except ImportError: + HAS_HVAC = False + HVAC_IMPORT_ERROR = traceback.format_exc() +else: + HVAC_IMPORT_ERROR = None + HAS_HVAC = True + + +def run_module(): + argspec = HashiVaultModule.generate_argspec( + path=dict(type='str', required=True), + ) + + module = HashiVaultModule( + argument_spec=argspec, + supports_check_mode=True + ) + + if not HAS_HVAC: + module.fail_json( + msg=missing_required_lib('hvac'), + exception=HVAC_IMPORT_ERROR + ) + + path = module.params.get('path') + + module.connection_options.process_connection_options() + client_args = module.connection_options.get_hvac_connection_options() + client = module.helper.get_vault_client(**client_args) + + try: + module.authenticator.validate() + module.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + module.fail_json(msg=to_native(e), exception=traceback.format_exc()) + + try: + data = client.read(path) + except hvac.exceptions.Forbidden as e: + module.fail_json(msg="Forbidden: Permission Denied to path '%s'." % path, exception=traceback.format_exc()) + + if data is None: + module.fail_json(msg="The path '%s' doesn't seem to exist." % path) + + module.exit_json(data=data) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/plugins/modules/vault_token_create.py b/ansible_collections/community/hashi_vault/plugins/modules/vault_token_create.py new file mode 100644 index 000000000..c2d19422f --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/modules/vault_token_create.py @@ -0,0 +1,223 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + module: vault_token_create + version_added: 2.3.0 + author: + - Brian Scholer (@briantist) + short_description: Create a HashiCorp Vault token + requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). + description: + - Creates a token in HashiCorp Vault, returning the response, including the token. + seealso: + - ref: community.hashi_vault.vault_token_create lookup <ansible_collections.community.hashi_vault.vault_token_create_lookup> + description: The official documentation for the C(community.hashi_vault.vault_token_create) lookup plugin. + - module: community.hashi_vault.vault_login + - ref: community.hashi_vault.vault_login lookup <ansible_collections.community.hashi_vault.vault_login_lookup> + description: The official documentation for the C(community.hashi_vault.vault_login) lookup plugin. 
+ - ref: community.hashi_vault.vault_login_token filter <ansible_collections.community.hashi_vault.vault_login_token_filter> + description: The official documentation for the C(community.hashi_vault.vault_login_token) filter plugin. + extends_documentation_fragment: + - community.hashi_vault.attributes + - community.hashi_vault.attributes.action_group + - community.hashi_vault.connection + - community.hashi_vault.auth + - community.hashi_vault.token_create + - community.hashi_vault.wrapping + notes: + - Token creation is a write operation (creating a token persisted to storage), so this module always reports C(changed=True). + - For the purposes of Ansible playbooks however, + it may be more useful to set I(changed_when=false) if you are doing idempotency checks against the target system. + attributes: + check_mode: + support: partial + details: + - In check mode, this module will not create a token, and will instead return a basic structure with an empty token. + However, this may not be useful if the token is required for follow on tasks. + - It may be better to use this module with I(check_mode=false) in order to have a valid token that can be used. + options: {} +""" + +EXAMPLES = """ +- name: Login via userpass and create a child token + community.hashi_vault.vault_token_create: + url: https://vault:8201 + auth_method: userpass + username: user + password: '{{ passwd }}' + register: token_data + +- name: Retrieve an approle role ID using the child token (token via filter) + community.hashi_vault.vault_read: + url: https://vault:8201 + auth_method: token + token: '{{ token_data | community.hashi_vault.vault_login_token }}' + path: auth/approle/role/role-name/role-id + register: approle_id + +- name: Retrieve an approle role ID using the child token (token via direct dict access) + community.hashi_vault.vault_read: + url: https://vault:8201 + auth_method: token + token: '{{ token_data.login.auth.client_token }}' + path: auth/approle/role/role-name/role-id + register: approle_id + +# implicitly uses token auth with a token from the environment +- name: Create an orphaned token with a short TTL + community.hashi_vault.vault_token_create: + url: https://vault:8201 + orphan: true + ttl: 60s + register: token_data + +- name: Display the full response + ansible.builtin.debug: + var: token_data.login +""" + +RETURN = """ +login: + description: The result of the token creation operation. + returned: success + type: dict + sample: + auth: + client_token: s.rlwajI2bblHAWU7uPqZhLru3 + data: null + contains: + auth: + description: The C(auth) member of the token response. + returned: success + type: dict + contains: + client_token: + description: Contains the newly created token. + returned: success + type: str + data: + description: The C(data) member of the token response. 
+ returned: success, when available + type: dict +""" + +import traceback + +from ansible.module_utils._text import to_native + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_module import HashiVaultModule +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + + +PASS_THRU_OPTION_NAMES = [ + 'no_parent', + 'no_default_policy', + 'policies', + 'id', + 'role_name', + 'meta', + 'renewable', + 'ttl', + 'type', + 'explicit_max_ttl', + 'display_name', + 'num_uses', + 'period', + 'entity_alias', + 'wrap_ttl', +] + + +ORPHAN_OPTION_TRANSLATION = { + 'id': 'token_id', + 'role_name': 'role', + 'type': 'token_type', +} + + +def run_module(): + argspec = HashiVaultModule.generate_argspec( + orphan=dict(type='bool', default=False), + no_parent=dict(type='bool'), + no_default_policy=dict(type='bool'), + policies=dict(type='list', elements='str'), + id=dict(type='str'), + role_name=dict(type='str'), + meta=dict(type='dict'), + renewable=dict(type='bool'), + ttl=dict(type='str'), + type=dict(type='str', choices=['batch', 'service']), + explicit_max_ttl=dict(type='str'), + display_name=dict(type='str'), + num_uses=dict(type='int'), + period=dict(type='str'), + entity_alias=dict(type='str'), + wrap_ttl=dict(type='str'), + ) + + module = HashiVaultModule( + argument_spec=argspec, + supports_check_mode=True + ) + + module.connection_options.process_connection_options() + client_args = module.connection_options.get_hvac_connection_options() + client = module.helper.get_vault_client(**client_args) + + try: + module.authenticator.validate() + module.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + module.fail_json(msg=to_native(e), exception=traceback.format_exc()) + + pass_thru_options = module.adapter.get_filled_options(*PASS_THRU_OPTION_NAMES) + + orphan_options = pass_thru_options.copy() + + for key in pass_thru_options.keys(): + if key in ORPHAN_OPTION_TRANSLATION: + orphan_options[ORPHAN_OPTION_TRANSLATION[key]] = orphan_options.pop(key) + + # token creation is a write operation, using storage and resources + changed = True + response = None + + if module.check_mode: + module.exit_json(changed=changed, login={'auth': {'client_token': None}}) + + if module.adapter.get_option('orphan'): + try: + try: + # this method was added in hvac 1.0.0 + # See: https://github.com/hvac/hvac/pull/869 + response = client.auth.token.create_orphan(**orphan_options) + except AttributeError: + # this method was removed in hvac 1.0.0 + # See: https://github.com/hvac/hvac/issues/758 + response = client.create_token(orphan=True, **orphan_options) + except Exception as e: + module.fail_json(msg=to_native(e), exception=traceback.format_exc()) + else: + try: + response = client.auth.token.create(**pass_thru_options) + except Exception as e: + module.fail_json(msg=to_native(e), exception=traceback.format_exc()) + + module.exit_json(changed=changed, login=response) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/plugins/modules/vault_write.py b/ansible_collections/community/hashi_vault/plugins/modules/vault_write.py new file mode 100644 index 000000000..35c7fcb60 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/modules/vault_write.py @@ -0,0 +1,191 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# (c) 2022, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see 
LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + module: vault_write + version_added: 2.4.0 + author: + - Brian Scholer (@briantist) + short_description: Perform a write operation against HashiCorp Vault + requirements: + - C(hvac) (L(Python library,https://hvac.readthedocs.io/en/stable/overview.html)) + - For detailed requirements, see R(the collection requirements page,ansible_collections.community.hashi_vault.docsite.user_guide.requirements). + description: + - Performs a generic write operation against a given path in HashiCorp Vault, returning any output. + notes: + - C(vault_write) is a generic module to do operations that do not yet have a dedicated module. Where a specific module exists, that should be used instead. + - The I(data) option is not treated as secret and may be logged. Use the C(no_log) keyword if I(data) contains sensitive values. + - This module always reports C(changed) status because it cannot guarantee idempotence. + - Use C(changed_when) to control that in cases where the operation is known to not change state. + attributes: + check_mode: + support: partial + details: + - In check mode, an empty response will be returned and the write will not be performed. + seealso: + - ref: community.hashi_vault.vault_write lookup <ansible_collections.community.hashi_vault.vault_write_lookup> + description: The official documentation for the C(community.hashi_vault.vault_write) lookup plugin. + - module: community.hashi_vault.vault_read + - ref: community.hashi_vault.vault_read lookup <ansible_collections.community.hashi_vault.vault_read_lookup> + description: The official documentation for the C(community.hashi_vault.vault_read) lookup plugin. + extends_documentation_fragment: + - community.hashi_vault.attributes + - community.hashi_vault.attributes.action_group + - community.hashi_vault.connection + - community.hashi_vault.auth + - community.hashi_vault.wrapping + options: + path: + description: Vault path to be written to. + type: str + required: True + data: + description: A dictionary to be serialized to JSON and then sent as the request body. 
+ type: dict + required: false + default: {} +""" + +EXAMPLES = """ +- name: Write a value to the cubbyhole via the remote host with userpass auth + community.hashi_vault.vault_write: + url: https://vault:8201 + path: cubbyhole/mysecret + data: + key1: val1 + key2: val2 + auth_method: userpass + username: user + password: '{{ passwd }}' + register: result + +- name: Display the result of the write (this can be empty) + ansible.builtin.debug: + msg: "{{ result.data }}" + +- name: Write secret to Vault using key value V2 engine + community.hashi_vault.vault_write: + path: secret/data/mysecret + data: + data: + key1: val1 + key2: val2 + +- name: Retrieve an approle role ID from Vault via the remote host + community.hashi_vault.vault_read: + url: https://vault:8201 + path: auth/approle/role/role-name/role-id + register: approle_id + +- name: Generate a secret-id for the given approle + community.hashi_vault.vault_write: + url: https://vault:8201 + path: auth/approle/role/role-name/secret-id + register: secret_id + +- name: Display the role ID and secret ID + ansible.builtin.debug: + msg: + - "role-id: {{ approle_id.data.data.role_id }}" + - "secret-id: {{ secret_id.data.data.secret_id }}" +""" + +RETURN = """ +data: + description: The raw result of the write against the given path. + returned: success + type: dict +""" + +import traceback + +from ansible.module_utils._text import to_native +from ansible.module_utils.basic import missing_required_lib + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_module import HashiVaultModule +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultValueError + +try: + import hvac +except ImportError: + HAS_HVAC = False + HVAC_IMPORT_ERROR = traceback.format_exc() +else: + HVAC_IMPORT_ERROR = None + HAS_HVAC = True + + +def run_module(): + argspec = HashiVaultModule.generate_argspec( + path=dict(type='str', required=True), + data=dict(type='dict', required=False, default={}), + wrap_ttl=dict(type='str'), + ) + + module = HashiVaultModule( + argument_spec=argspec, + supports_check_mode=True + ) + + if not HAS_HVAC: + module.fail_json( + msg=missing_required_lib('hvac'), + exception=HVAC_IMPORT_ERROR + ) + + path = module.params.get('path') + data = module.params.get('data') + wrap_ttl = module.params.get('wrap_ttl') + + module.connection_options.process_connection_options() + client_args = module.connection_options.get_hvac_connection_options() + client = module.helper.get_vault_client(**client_args) + + try: + module.authenticator.validate() + module.authenticator.authenticate(client) + except (NotImplementedError, HashiVaultValueError) as e: + module.fail_json(msg=to_native(e), exception=traceback.format_exc()) + + try: + if module.check_mode: + response = {} + else: + response = client.write(path=path, wrap_ttl=wrap_ttl, **data) + except hvac.exceptions.Forbidden: + module.fail_json(msg="Forbidden: Permission Denied to path '%s'." % path, exception=traceback.format_exc()) + except hvac.exceptions.InvalidPath: + module.fail_json(msg="The path '%s' doesn't seem to exist." % path, exception=traceback.format_exc()) + except hvac.exceptions.InternalServerError as e: + module.fail_json(msg="Internal Server Error: %s" % to_native(e), exception=traceback.format_exc()) + + # https://github.com/hvac/hvac/issues/797 + # HVAC returns a raw response object when the body is not JSON. + # That includes 204 responses, which are successful with no body. 
+ # So we will try to detect that and a act accordingly. + # A better way may be to implement our own adapter for this + # collection, but it's a little premature to do that. + if hasattr(response, 'json') and callable(response.json): + if response.status_code == 204: + output = {} + else: + module.warn('Vault returned status code %i and an unparsable body.' % response.status_code) + output = response.content + else: + output = response + + module.exit_json(changed=True, data=output) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/plugins/plugin_utils/_hashi_vault_lookup_base.py b/ansible_collections/community/hashi_vault/plugins/plugin_utils/_hashi_vault_lookup_base.py new file mode 100644 index 000000000..7a878cbb4 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/plugin_utils/_hashi_vault_lookup_base.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +# FOR INTERNAL COLLECTION USE ONLY +# The interfaces in this file are meant for use within the community.hashi_vault collection +# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release. +# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.errors import AnsibleError +from ansible.plugins.lookup import LookupBase +from ansible.utils.display import Display + +from ..plugin_utils._hashi_vault_plugin import HashiVaultPlugin + +display = Display() + + +class HashiVaultLookupBase(HashiVaultPlugin, LookupBase): + + def __init__(self, loader=None, templar=None, **kwargs): + HashiVaultPlugin.__init__(self) + LookupBase.__init__(self, loader=loader, templar=templar, **kwargs) + + def parse_kev_term(self, term, plugin_name, first_unqualified=None): + '''parses a term string into a dictionary''' + param_dict = {} + + for i, param in enumerate(term.split()): + try: + key, value = param.split('=', 1) + except ValueError: + if i == 0 and first_unqualified is not None: + # allow first item to be specified as value only and assign to assumed option name + key = first_unqualified + value = param + else: + raise AnsibleError("%s lookup plugin needs key=value pairs, but received %s" % (plugin_name, term)) + + if key in param_dict: + removed_in = '5.0.0' + msg = "Duplicate key '%s' in the term string '%s'." % (key, term) + display.deprecated(msg + "\nIn version %s of the collection, this will raise an exception." 
% (removed_in, ), removed_in) + # TODO: v5.0.0: remove deprecation message, uncomment: https://github.com/ansible-collections/community.hashi_vault/pull/350 + # raise AnsibleOptionsError(msg) + + param_dict[key] = value + + return param_dict diff --git a/ansible_collections/community/hashi_vault/plugins/plugin_utils/_hashi_vault_plugin.py b/ansible_collections/community/hashi_vault/plugins/plugin_utils/_hashi_vault_plugin.py new file mode 100644 index 000000000..65c17c277 --- /dev/null +++ b/ansible_collections/community/hashi_vault/plugins/plugin_utils/_hashi_vault_plugin.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +# FOR INTERNAL COLLECTION USE ONLY +# The interfaces in this file are meant for use within the community.hashi_vault collection +# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release. +# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686 +# Please open an issue if you have questions about this. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.plugins import AnsiblePlugin +from ansible import constants as C +from ansible.utils.display import Display + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultHelper, + HashiVaultOptionAdapter, +) + +from ansible_collections.community.hashi_vault.plugins.module_utils._connection_options import HashiVaultConnectionOptions +from ansible_collections.community.hashi_vault.plugins.module_utils._authenticator import HashiVaultAuthenticator + + +display = Display() + + +class HashiVaultPlugin(AnsiblePlugin): + def __init__(self): + super(HashiVaultPlugin, self).__init__() + + self.helper = HashiVaultHelper() + self._options_adapter = HashiVaultOptionAdapter.from_ansible_plugin(self) + self.connection_options = HashiVaultConnectionOptions(self._options_adapter, self._generate_retry_callback) + self.authenticator = HashiVaultAuthenticator(self._options_adapter, display.warning, display.deprecated) + + def _generate_retry_callback(self, retry_action): + '''returns a Retry callback function for plugins''' + def _on_retry(retry_obj): + if retry_obj.total > 0: + if retry_action == 'warn': + display.warning('community.hashi_vault: %i %s remaining.' % (retry_obj.total, 'retry' if retry_obj.total == 1 else 'retries')) + else: + pass + + return _on_retry + + def process_deprecations(self, collection_name='community.hashi_vault'): + '''processes deprecations related to the collection''' + + # TODO: this is a workaround for deprecations not being shown in lookups + # See: + # - https://github.com/ansible/ansible/issues/73051 + # - https://github.com/ansible/ansible/pull/73058 + # - https://github.com/ansible/ansible/pull/73239 + # - https://github.com/ansible/ansible/pull/73240 + # + # If a fix is backported to 2.9, this should be removed. + # Otherwise, we'll have to test with fixes that are available and see how we + # can determine whether to execute this conditionally. 
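+
+        # A sketch of the data consumed below (an assumption about how ansible-core
+        # populates C.config.DEPRECATED, shown for illustration only): each entry is a
+        # (name, details) pair along the lines of
+        #   ('some_old_option', {'why': 'option was renamed', 'alternatives': 'new_option',
+        #                        'version': '4.0.0', 'collection_name': 'community.hashi_vault'})
+        # Entries for other collections are skipped; matching ones are re-emitted via
+        # display.deprecated() and then removed so nothing else reports them again.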
+ + # nicked from cli/__init__.py + # with slight customizations to help filter out relevant messages + # (relying on the collection name since it's a valid attrib and we only have 1 plugin at this time) + + # warn about deprecated config options + + for deprecated in list(C.config.DEPRECATED): + name = deprecated[0] + why = deprecated[1]['why'] + if deprecated[1].get('collection_name') != collection_name: + continue + + if 'alternatives' in deprecated[1]: + alt = ', use %s instead' % deprecated[1]['alternatives'] + else: + alt = '' + ver = deprecated[1].get('version') + date = deprecated[1].get('date') + collection_name = deprecated[1].get('collection_name') + display.deprecated("%s option, %s%s" % (name, why, alt), version=ver, date=date, collection_name=collection_name) + + # remove this item from the list so it won't get processed again by something else + C.config.DEPRECATED.remove(deprecated) diff --git a/ansible_collections/community/hashi_vault/tests/config.yml b/ansible_collections/community/hashi_vault/tests/config.yml new file mode 100644 index 000000000..fef57dfef --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/config.yml @@ -0,0 +1,44 @@ +# Sample ansible-test configuration file for collections. +# Support for this feature was first added in ansible-core 2.12. +# Use of this file is optional. +# If used, this file must be placed in "tests/config.yml" relative to the base of the collection. +# see also: +# - https://github.com/ansible-collections/overview/issues/45#issuecomment-827853900 +# - https://github.com/ansible/ansible/blob/devel/test/lib/ansible_test/config/config.yml + +modules: + # Configuration for modules/module_utils. + # These settings do not apply to other content in the collection. + + python_requires: '>=3.6' + # Python versions supported by modules/module_utils. + # This setting is required. + # + # Possible values: + # + # - 'default' - All Python versions supported by Ansible. + # This is the default value if no configuration is provided. + # - 'controller' - All Python versions supported by the Ansible controller. + # This indicates the modules/module_utils can only run on the controller. + # Intended for use only with modules/module_utils that depend on ansible-connection, which only runs on the controller. + # Unit tests for modules/module_utils will be permitted to import any Ansible code, instead of only module_utils. + # - SpecifierSet - A PEP 440 specifier set indicating the supported Python versions. + # This is only needed when modules/module_utils do not support all Python versions supported by Ansible. + # It is not necessary to exclude versions which Ansible does not support, as this will be done automatically. + # + # What does this affect? + # + # - Unit tests will be skipped on any unsupported Python version. + # - Sanity tests that are Python version specific will be skipped on any unsupported Python version that is not supported by the controller. + # + # Sanity tests that are Python version specific will always be executed for Python versions supported by the controller, regardless of this setting. + # Reasons for this restriction include, but are not limited to: + # + # - AnsiballZ must be able to AST parse modules/module_utils on the controller, even though they may execute on a managed node. + # - ansible-doc must be able to AST parse modules/module_utils on the controller to display documentation. + # - ansible-test must be able to AST parse modules/module_utils to perform static analysis on them. 
+ # - ansible-test must be able to execute portions of modules/module_utils to validate their argument specs. + # + # These settings only apply to modules/module_utils. + # It is not possible to declare supported Python versions for controller-only code. + # All Python versions supported by the controller must be supported by controller-only code. diff --git a/ansible_collections/community/hashi_vault/tests/integration/.ansible-lint b/ansible_collections/community/hashi_vault/tests/integration/.ansible-lint new file mode 100644 index 000000000..274a44617 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/.ansible-lint @@ -0,0 +1,10 @@ +# .ansible-lint file for integration tests +--- +skip_list: + - unnamed-task + - truthy + - var-naming + - meta-no-info + - ignore-errors + - risky-file-permissions + - command-instead-of-shell diff --git a/ansible_collections/community/hashi_vault/tests/integration/integration.cfg b/ansible_collections/community/hashi_vault/tests/integration/integration.cfg new file mode 100644 index 000000000..289e4ca82 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/integration.cfg @@ -0,0 +1,6 @@ +# this is the ansible.cfg file used for integration tests + +[hashi_vault_collection] +# if any connections are taking longer than this to complete there's probably something really wrong +# with the integration tests, so it'd be better to fail faster than the 30s default +timeout = 5 diff --git a/ansible_collections/community/hashi_vault/tests/integration/integration_config.yml.sample b/ansible_collections/community/hashi_vault/tests/integration/integration_config.yml.sample new file mode 100644 index 000000000..489b41e0e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/integration_config.yml.sample @@ -0,0 +1,16 @@ +# Running integration tests requires you set up an integration_config.yml that +# tells the tests where to find Vault and other dependencies. +# See the contributor guide at: +# https://docs.ansible.com/ansible/devel/collections/community/hashi_vault/docsite/contributor_guide.html#integration-tests +--- +# this is just a small sample of what the file looks like, +# this file is not valid on its own. See the guide for details. + +vault_version: latest + +vault_dev_root_token_id: 47542cbc-6bf8-4fba-8eda-02e0a0d29a0a + +vault_proxy_server: 'http://127.0.0.1:8001' + +vault_test_server_http: http://localhost:8200 +vault_test_server_https: https://localhost:8300 diff --git a/ansible_collections/community/hashi_vault/tests/integration/requirements.txt b/ansible_collections/community/hashi_vault/tests/integration/requirements.txt new file mode 100644 index 000000000..033733f7c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/requirements.txt @@ -0,0 +1,16 @@ +# the collection supports python 3.6 and higher, however the constraints for +# earlier python versions are still needed for Ansible < 2.12 which doesn't +# support tests/config.yml, so that unit tests (which will be skipped) won't +# choke on installing requirements. 
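+
+# For a manual local run, one possible way to install these (from the collection root) is:
+#   pip install -r tests/integration/requirements.txt
+# (ansible-test may also handle these requirements automatically depending on how it is invoked.)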
+ +hvac >= 0.10.6, != 0.10.12, != 0.10.13, < 1.0.0 ; python_version == '2.7' # bugs in 0.10.12 and 0.10.13 prevent it from working in Python 2 +hvac >= 0.10.6, < 1.0.0 ; python_version == '3.5' # py3.5 support will be dropped in 1.0.0 +hvac >= 0.10.6 ; python_version >= '3.6' + +# these should be satisfied naturally by the requests versions required by hvac anyway +urllib3 >= 1.15 ; python_version >= '3.6' # we need raise_on_status for retry support to raise the correct exceptions https://github.com/urllib3/urllib3/blob/main/CHANGES.rst#115-2016-04-06 +urllib3 >= 1.15, <2.0.0 ; python_version < '3.6' # https://urllib3.readthedocs.io/en/latest/v2-roadmap.html#optimized-for-python-3-6 + +# azure-identity 1.7.0 depends on cryptography 2.5 which drops python 2.6 support +azure-identity < 1.7.0; python_version < '2.7' +azure-identity; python_version >= '2.7' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/aliases new file mode 100644 index 000000000..637b4fbf9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/aliases @@ -0,0 +1,2 @@ +vault/auth/approle +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/defaults/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/defaults/main.yml new file mode 100644 index 000000000..86577ee4c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/defaults/main.yml @@ -0,0 +1,14 @@ +--- +ansible_hashi_vault_url: '{{ vault_test_server_http }}' +ansible_hashi_vault_auth_method: approle + +auth_paths: + - approle + - approle-alt + +secret_id_role: req-secret-id-role +no_secret_id_role: no-secret-id-role + +vault_approle_canary: + path: cubbyhole/configure_approle + value: complete # value does not matter diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/tasks/approle_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/tasks/approle_setup.yml new file mode 100644 index 000000000..0a0a120ea --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/tasks/approle_setup.yml @@ -0,0 +1,38 @@ +- name: "Setup block" + vars: + is_default_path: "{{ this_path == default_path }}" + block: + - name: 'Enable the approle auth method' + vault_ci_enable_auth: + method_type: approle + path: '{{ omit if is_default_path else this_path }}' + config: + default_lease_ttl: 60m + + - name: 'Create an approle policy' + vault_ci_policy_put: + name: approle-policy + policy: | + path "auth/{{ this_path }}/login" { + capabilities = [ "create", "read" ] + } + + - name: 'Create a named role (secret ID required)' + vault_ci_write: + path: 'auth/{{ this_path }}/role/{{ secret_id_role }}' + data: + # in docs, this is token_policies (changed in Vault 1.2) + # use 'policies' to support older 
versions + policies: "{{ 'test-policy' if is_default_path else 'alt-policy' }},approle-policy" + secret_id_ttl: 60m + + - name: 'Create a named role (without secret id)' + vault_ci_write: + path: 'auth/{{ this_path }}/role/{{ no_secret_id_role }}' + data: + # in docs, this is token_policies (changed in Vault 1.2) + # use 'policies' to support older versions + policies: "{{ 'test-policy' if is_default_path else 'alt-policy' }},approle-policy" + secret_id_ttl: 60m + bind_secret_id: false + secret_id_bound_cidrs: '0.0.0.0/0' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/tasks/approle_test_controller.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/tasks/approle_test_controller.yml new file mode 100644 index 000000000..8cf331b1d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/tasks/approle_test_controller.yml @@ -0,0 +1,55 @@ +- name: "Test block" + vars: + is_default_path: "{{ this_path == default_path }}" + kwargs_mount: "{{ {} if is_default_path else {'mount_point': this_path} }}" + kwargs_secret_id: {} + kwargs: "{{ kwargs_common | combine(kwargs_mount) | combine(kwargs_secret_id) }}" + block: + - name: 'Fetch the RoleID of the AppRole' + vault_ci_read: + path: 'auth/{{ this_path }}/role/{{ approle_name }}/role-id' + register: role_id_cmd + + - name: Set common args + set_fact: + kwargs_common: + role_id: '{{ role_id_cmd.result.data.role_id }}' + + - name: 'Get a SecretID issued against the AppRole' + when: use_secret_id | bool + vault_ci_write: + path: 'auth/{{ this_path }}/role/{{ approle_name }}/secret-id' + data: {} + register: secret_id_cmd + + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + vars: + kwargs_secret_id: | + {% if use_secret_id | bool %} + {{ {'secret_id': secret_id_cmd.result.data.secret_id} }} + {% else %} + {{ {} }} + {% endif %} + set_fact: + response: "{{ lookup('vault_test_auth', **kwargs) }}" + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('test-policy' in response.login.auth.policies) | bool == is_default_path + - ('test-policy' not in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when erroneous credentials are used and secret ID is required + set_fact: + response: "{{ lookup('vault_test_auth', secret_id='fake', want_exception=true, **kwargs) }}" + + - assert: + fail_msg: "An invalid secret ID somehow did not cause a failure." 
+ that: + - (response is failed) == use_secret_id + - not use_secret_id or response.msg is search('invalid secret id') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/tasks/approle_test_target.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/tasks/approle_test_target.yml new file mode 100644 index 000000000..1f5796957 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/tasks/approle_test_target.yml @@ -0,0 +1,49 @@ +- name: "Test block" + vars: + is_default_path: "{{ this_path == default_path }}" + module_defaults: + vault_test_auth: + url: '{{ ansible_hashi_vault_url }}' + auth_method: '{{ ansible_hashi_vault_auth_method }}' + mount_point: '{{ omit if is_default_path else this_path }}' + role_id: '{{ role_id_cmd.result.data.role_id | default(omit) }}' + block: + - name: 'Fetch the RoleID of the AppRole' + vault_ci_read: + path: 'auth/{{ this_path }}/role/{{ approle_name }}/role-id' + register: role_id_cmd + + - name: 'Get a SecretID issued against the AppRole' + when: use_secret_id | bool + vault_ci_write: + path: 'auth/{{ this_path }}/role/{{ approle_name }}/secret-id' + data: {} + register: secret_id_cmd + + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + register: response + vault_test_auth: + secret_id: "{{ secret_id_cmd.result.data.secret_id if (use_secret_id | bool) else omit }}" + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('test-policy' in response.login.auth.policies) | bool == is_default_path + - ('test-policy' not in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when erroneous credentials are used and secret ID is required + register: response + vault_test_auth: + secret_id: fake + want_exception: yes + + - assert: + fail_msg: "An invalid secret ID somehow did not cause a failure." 
+ that: + - (response.inner is failed) == use_secret_id + - not use_secret_id or response.msg is search('invalid secret id') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/tasks/main.yml new file mode 100644 index 000000000..79dfd38d7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_approle/tasks/main.yml @@ -0,0 +1,52 @@ +--- +# task vars are not templated when used as vars, so we'll need to set_fact this evaluate the template +# see: https://github.com/ansible/ansible/issues/73268 +- name: Persist defaults + set_fact: + '{{ item.key }}': "{{ lookup('vars', item.key) }}" + loop: "{{ lookup('file', role_path ~ '/defaults/main.yml') | from_yaml | dict2items }}" + loop_control: + label: '{{ item.key }}' + +- name: Configuration tasks + module_defaults: + vault_ci_enable_auth: '{{ vault_plugins_module_defaults_common }}' + vault_ci_policy_put: '{{ vault_plugins_module_defaults_common }}' + vault_ci_write: '{{ vault_plugins_module_defaults_common }}' + vault_ci_read: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Canary for approle auth + vault_ci_read: + path: '{{ vault_approle_canary.path }}' + register: canary + + - name: Configure approle + when: canary.result is none + loop: '{{ auth_paths }}' + include_tasks: + file: approle_setup.yml + apply: + vars: + default_path: '{{ ansible_hashi_vault_auth_method }}' + this_path: '{{ item }}' + + - name: Write Canary + when: canary.result is none + vault_ci_write: + path: '{{ vault_approle_canary.path }}' + data: + value: '{{ vault_approle_canary.value }}' + + - name: Run approle tests + loop: '{{ auth_paths | product([secret_id_role, no_secret_id_role]) | product(["target", "controller"]) | list }}' + include_tasks: + file: approle_test_{{ item[1] }}.yml + apply: + vars: + default_path: '{{ ansible_hashi_vault_auth_method }}' + this_path: '{{ item[0][0] }}' + approle_name: '{{ item[0][1] }}' + use_secret_id: '{{ item[0][1] == secret_id_role }}' + module_defaults: + assert: + quiet: yes diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/aliases new file mode 100644 index 000000000..b26fb3121 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/aliases @@ -0,0 +1,2 @@ +vault/auth/aws_iam +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/defaults/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/defaults/main.yml new file mode 100644 index 000000000..642fd18bb --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/defaults/main.yml @@ -0,0 +1,10 @@ +--- +ansible_hashi_vault_url: '{{ vault_mmock_server_http }}' +ansible_hashi_vault_auth_method: aws_iam + +auth_paths: + - aws + - aws-alt + +aws_access_key: abc +aws_secret_key: xyz diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: 
+ - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/tasks/aws_iam_test_controller.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/tasks/aws_iam_test_controller.yml new file mode 100644 index 000000000..60ec9cedf --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/tasks/aws_iam_test_controller.yml @@ -0,0 +1,33 @@ +- name: "Test block" + vars: + is_default_path: "{{ this_path == default_path }}" + kwargs_mount: "{{ {} if is_default_path else {'mount_point': this_path} }}" + kwargs_common: + aws_access_key: '{{ aws_access_key }}' + aws_secret_key: '{{ aws_secret_key }}' + kwargs: "{{ kwargs_common | combine(kwargs_mount) }}" + block: + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + set_fact: + response: "{{ lookup('vault_test_auth', role_id='not-important', **kwargs) }}" + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('aws-sample-policy' in response.login.auth.policies) | bool == is_default_path + - ('aws-sample-policy' not in response.login.auth.policies) | bool != is_default_path + - ('aws-alt-sample-policy' in response.login.auth.policies) | bool != is_default_path + - ('aws-alt-sample-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when something goes wrong (simulated) + set_fact: + response: "{{ lookup('vault_test_auth', role_id='fail-me-role', want_exception=true, **kwargs) }}" + + - assert: + fail_msg: "An invalid request somehow did not cause a failure." 
+ that: + - response is failed + - response.msg is search('<Error>') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/tasks/aws_iam_test_target.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/tasks/aws_iam_test_target.yml new file mode 100644 index 000000000..0f4267f13 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/tasks/aws_iam_test_target.yml @@ -0,0 +1,38 @@ +- name: "Test block" + vars: + is_default_path: "{{ this_path == default_path }}" + module_defaults: + vault_test_auth: + url: '{{ ansible_hashi_vault_url }}' + auth_method: '{{ ansible_hashi_vault_auth_method }}' + mount_point: '{{ omit if is_default_path else this_path }}' + aws_access_key: '{{ aws_access_key }}' + aws_secret_key: '{{ aws_secret_key }}' + role_id: not-important + block: + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + register: response + vault_test_auth: + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('aws-sample-policy' in response.login.auth.policies) | bool == is_default_path + - ('aws-sample-policy' not in response.login.auth.policies) | bool != is_default_path + - ('aws-alt-sample-policy' in response.login.auth.policies) | bool != is_default_path + - ('aws-alt-sample-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when something goes wrong (simulated) + register: response + vault_test_auth: + role_id: fail-me-role + want_exception: yes + + - assert: + fail_msg: "An invalid request somehow did not cause a failure." 
+ that: + - response.inner is failed + - response.msg is search('<Error>') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/tasks/main.yml new file mode 100644 index 000000000..50c57e5c6 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_aws_iam/tasks/main.yml @@ -0,0 +1,23 @@ +--- +# task vars are not templated when used as vars, so we'll need to set_fact this evaluate the template +# see: https://github.com/ansible/ansible/issues/73268 +- name: Persist defaults + set_fact: + '{{ item.key }}': "{{ lookup('vars', item.key) }}" + loop: "{{ lookup('file', role_path ~ '/defaults/main.yml') | from_yaml | dict2items }}" + loop_control: + label: '{{ item.key }}' + +# there's no setup for this auth method because its API is mocked + +- name: Run aws_iam tests + loop: '{{ auth_paths | product(["target", "controller"]) | list }}' + include_tasks: + file: aws_iam_test_{{ item[1] }}.yml + apply: + vars: + default_path: aws + this_path: '{{ item[0] }}' + module_defaults: + assert: + quiet: yes diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/aliases new file mode 100644 index 000000000..32ccc0d36 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/aliases @@ -0,0 +1,2 @@ +vault/auth/azure +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/defaults/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/defaults/main.yml new file mode 100644 index 000000000..16540fbcd --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/defaults/main.yml @@ -0,0 +1,10 @@ +# Copyright (c) 2022 Junrui Chen (@jchenship) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later +--- +ansible_hashi_vault_url: '{{ vault_mmock_server_http }}' +ansible_hashi_vault_auth_method: azure + +auth_paths: + - azure + - azure-alt diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/meta/main.yml new file mode 100644 index 000000000..fa4fc1c74 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/meta/main.yml @@ -0,0 +1,7 @@ +# Copyright (c) 2022 Junrui Chen (@jchenship) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/tasks/azure_test_controller.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/tasks/azure_test_controller.yml new file mode 100644 index 000000000..8b38c0d73 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/tasks/azure_test_controller.yml @@ -0,0 +1,48 @@ +# Copyright (c) 2022 Junrui Chen (@jchenship) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or 
https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later +--- +- name: "Test block" + vars: + is_default_path: "{{ this_path == default_path }}" + kwargs_mount: "{{ {} if is_default_path else {'mount_point': this_path} }}" + kwargs_common: + jwt: azure-jwt + kwargs: "{{ kwargs_common | combine(kwargs_mount) }}" + block: + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + set_fact: + response: "{{ lookup('vault_test_auth', role_id='not-important', **kwargs) }}" + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('azure-sample-policy' in response.login.auth.policies) | bool == is_default_path + - ('azure-sample-policy' not in response.login.auth.policies) | bool != is_default_path + - ('azure-alt-sample-policy' in response.login.auth.policies) | bool != is_default_path + - ('azure-alt-sample-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when something goes wrong (simulated) + set_fact: + response: "{{ lookup('vault_test_auth', role_id='fail-me-role', want_exception=true, **kwargs) }}" + + - assert: + fail_msg: "An invalid request somehow did not cause a failure." + that: + - response is failed + - response.msg is search('expected audience .+ got .+') + + - name: Failure expected when role_id is not given + set_fact: + response: "{{ lookup('vault_test_auth', want_exception=true, **kwargs) }}" + + - assert: + fail_msg: | + Missing role_id did not cause an expected failure. 
+ {{ response }} + that: + - response is failed + - response.msg is search('^role_id is required for azure authentication\.$') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/tasks/azure_test_target.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/tasks/azure_test_target.yml new file mode 100644 index 000000000..0c637c706 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/tasks/azure_test_target.yml @@ -0,0 +1,54 @@ +# Copyright (c) 2022 Junrui Chen (@jchenship) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later +--- +- name: "Test block" + vars: + is_default_path: "{{ this_path == default_path }}" + module_defaults: + vault_test_auth: + url: '{{ ansible_hashi_vault_url }}' + auth_method: '{{ ansible_hashi_vault_auth_method }}' + mount_point: '{{ omit if is_default_path else this_path }}' + jwt: azure-jwt + block: + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + register: response + vault_test_auth: + role_id: not-important + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('azure-sample-policy' in response.login.auth.policies) | bool == is_default_path + - ('azure-sample-policy' not in response.login.auth.policies) | bool != is_default_path + - ('azure-alt-sample-policy' in response.login.auth.policies) | bool != is_default_path + - ('azure-alt-sample-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when something goes wrong (simulated) + register: response + vault_test_auth: + role_id: fail-me-role + want_exception: yes + + - assert: + fail_msg: "An invalid request somehow did not cause a failure." + that: + - response.inner is failed + - response.msg is search('expected audience .+ got .+') + + - name: Failure expected when role_id is not given + register: response + vault_test_auth: + want_exception: yes + + - assert: + fail_msg: | + Missing role_id did not cause an expected failure. 
+ {{ response }} + that: + - response.inner is failed + - response.msg is search('^role_id is required for azure authentication\.$') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/tasks/main.yml new file mode 100644 index 000000000..95bde76e5 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_azure/tasks/main.yml @@ -0,0 +1,26 @@ +# Copyright (c) 2022 Junrui Chen (@jchenship) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later +--- +# task vars are not templated when used as vars, so we'll need to set_fact this evaluate the template +# see: https://github.com/ansible/ansible/issues/73268 +- name: Persist defaults + set_fact: + '{{ item.key }}': "{{ lookup('vars', item.key) }}" + loop: "{{ lookup('file', role_path ~ '/defaults/main.yml') | from_yaml | dict2items }}" + loop_control: + label: '{{ item.key }}' + +# there's no setup for this auth method because its API is mocked + +- name: Run azure tests + loop: '{{ auth_paths | product(["target", "controller"]) | list }}' + include_tasks: + file: azure_test_{{ item[1] }}.yml + apply: + vars: + default_path: azure + this_path: '{{ item[0] }}' + module_defaults: + assert: + quiet: yes diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/aliases new file mode 100644 index 000000000..13ba3a468 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/aliases @@ -0,0 +1,2 @@ +vault/auth/cert +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/defaults/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/defaults/main.yml new file mode 100644 index 000000000..4170d71f9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/defaults/main.yml @@ -0,0 +1,19 @@ +--- +vault_run_https_tests: True + +ansible_hashi_vault_url: '{{ vault_test_server_http }}' +ansible_hashi_vault_auth_method: cert + +auth_paths: + - cert + - cert-alt + +vault_cert_canary: + path: cubbyhole/configure_cert + value: complete # value does not matter + +auth_cert_cn: vault-test +auth_cert_cert: "{{ role_path }}/files/auth_cert.crt" +auth_cert_key: "{{ role_path }}/files/auth_cert.key" +invalid_auth_cert_cert: "{{ role_path }}/files/auth_cert_invalid.crt" +invalid_auth_cert_key: "{{ role_path }}/files/auth_cert_invalid.key" diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/files/auth_cert.crt b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/files/auth_cert.crt new file mode 100644 index 000000000..f0cbfc1db --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/files/auth_cert.crt @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDIjCCAgqgAwIBAgIUcyvqaCXttUgXhdmM8QJgCmaqsK4wDQYJKoZIhvcNAQEL +BQAwFTETMBEGA1UEAwwKdmF1bHQtdGVzdDAgFw0yMTEwMTkwNjAyMzFaGA8yMTIx +MDkyNTA2MDIzMVowFTETMBEGA1UEAwwKdmF1bHQtdGVzdDCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAMnAUmlSnZk3RBSs7iW5TPXAkUgdYhOlJKT4xRE1 +Ta3rZo1NaPas6rh0fl4lkNHB/OvzaVhNjgptkTXarccMOMhGiQUxhQEfKW+KzqIy 
+qsXXk68aj/xIgVWu1/IoVUYx89LUHE2Xdd5aO95w1a6goox96spVZp7UaWSb8/V9 +SGUexgAJMDGrGTPGbXD2pSsBfDWg5ysxiptZmUfeMWtZ/1OJJ9eFuHjW+tDCuOHe +HcekiKwM90CjvqeEhGdgEtLGVDkT4ud2u7YKeZXxIe5UTML20paCzd12v/LOIlM2 +ZEqhySVrBvl+Wzv5BLmffW62UPWXlRDZa9FaCAWG/94F/CMCAwEAAaNoMGYwHQYD +VR0OBBYEFGZFLq7ROcjYDf3n2A+KSe73zFOuMB8GA1UdIwQYMBaAFGZFLq7ROcjY +Df3n2A+KSe73zFOuMA8GA1UdEwEB/wQFMAMBAf8wEwYDVR0lBAwwCgYIKwYBBQUH +AwIwDQYJKoZIhvcNAQELBQADggEBAIe65BTFlTOFtUkxV1Zf3eOE6Lq6HZqzvNmK +1sCDdT1kL080P5Y6pcIeG99+mikN8b9Csh9CB/AqB5WjAF3Hfdg2EHd4d6gwjUwB +t5+hw83FQvckxEQ80ZVn2C83aZ9xIOgLqB2QnWJKGWp47816+/IqNo07NWvttLgM +L5e2cX6Ass1nT4Bjh8P4NQgg2lMDCzwwAZBR5D0mCSO8MbO1Ud8KRfaSIUczemlJ +PFNl3r3hI6efKCVfsbVcbnopuvPgcW4BH1KXZOLEnZm1lUdKCAYjA5g1rCGhKSTC +p/d72HTzqSHlgyEish7ueEeY0Z1sMDWvUZLu46GCGOf5pozxoQo= +-----END CERTIFICATE----- diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/files/auth_cert.key b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/files/auth_cert.key new file mode 100644 index 000000000..08e3c2ff7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/files/auth_cert.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDJwFJpUp2ZN0QU +rO4luUz1wJFIHWITpSSk+MURNU2t62aNTWj2rOq4dH5eJZDRwfzr82lYTY4KbZE1 +2q3HDDjIRokFMYUBHylvis6iMqrF15OvGo/8SIFVrtfyKFVGMfPS1BxNl3XeWjve +cNWuoKKMferKVWae1Glkm/P1fUhlHsYACTAxqxkzxm1w9qUrAXw1oOcrMYqbWZlH +3jFrWf9TiSfXhbh41vrQwrjh3h3HpIisDPdAo76nhIRnYBLSxlQ5E+Lndru2CnmV +8SHuVEzC9tKWgs3ddr/yziJTNmRKocklawb5fls7+QS5n31utlD1l5UQ2WvRWggF +hv/eBfwjAgMBAAECggEAXhehF9fdAokg6legld1vBCp0V2LEzA64IWyYVCc8/EEO +ShZxPqJ0seQ6z2PLv9guQkj6t6Er/rcNA0XiyDNuBBHJc6+drWFALPJa3pCszqp1 +CTRlNK8ICtMJibm/04YWhhxAuvtJkJMa2upa7h+iz80mBImB28+K+840ICj265lz +lua32UCMXw0lDPUm8Ud9lVYKy1tTy9IMOxnwsplXFxtARlxoyn0/f4e1v+4MT1zQ +gm3EXkr7xNK8gEnYB87RZBphpS1QPah3ZQZc81imgeZLEW+bxSKHlTp6qk9OMZYa +VfMNM5WeDEZw7aWaOSrWn2Vc2K2eCZpbV99FAMzDYQKBgQDlMWD9yoCYh46x1C5N +A4DCMkpcoPxQkFSWevUnLVxlvMybKleUrL9EUzJ58Ps2d2Yn5KfS8xjxwrBEF7DW +ReMtEmUhx3PeF3GdIUnsE6ePUgVysbtXHjPcj8LiRXsOrDIQ6g55qNrVczngGqmV +e8UP6fx15QbnTho00aU17K+J0wKBgQDhWUU/7gjtk1Z+3Qeyu5Rm65RirVWJ2lM/ +NluLX132uL831+X9v22r8o1P40cXJJ05baX2VCoR88Z6oVxD5g4eOS+scwwTSmIK +3n07awPDffw/Em3I3zhUN26ExM5Ed3IOngf5WLBw2r3gavGV2FDc/RDn28GTgM8I +Bo54MB+CcQKBgQCSuRjRgCjw/J8SIrZ9YQe1yoImaHz+ildMemyk5vomu0X085db +u3c6o1RGEkqQJMHGOjBh8HSAmNVE0jHICreTbBpIFKqNfE6ke+IZ+r8jFiwcr8Ex +1Uj4IyRsvHQ6AWyPvXcE1V86lGhxmkAETVbriCyxwKu5uhs2wxtTuTDrNwKBgCmt +khI92ybdfjzecmMLMGpMndgWSODDIWxqiEnhrIZKKwuQAbiJpCYOKTaiSW6tVere +Lbi+Xj2NwaLCn0brkKbkWr3hVVVoB3QNLEpGA58veOzNYsKpDkj9K5Kgt6mQEedE +MSNb1N46hq2v+diF9oa4w1yul0HJrADG/aia9ejxAoGAU3yES6OdE5QHyuiLHKfy +x5OOcQkTItk9IhxboSjYVB5GsBuKR+RloRZXmR9HhnzAjvbe0FVqGjdr5nHfICdE +8KsYpVf2c6x9NNbHSzqBARRg7x+H+uhfymgXfI4R3eL945ukFMC+qOU57kPvwriF +OvQYefm/1PN7vuLKsZ7g7iQ= +-----END PRIVATE KEY----- diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/files/auth_cert_invalid.crt b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/files/auth_cert_invalid.crt new file mode 100644 index 000000000..d6457a2f4 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/files/auth_cert_invalid.crt @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDDTCCAfWgAwIBAgIUWprzykl5M9p8NmcaS8v7mTQqCZEwDQYJKoZIhvcNAQEL +BQAwFTETMBEGA1UEAwwKdmF1bHQtdGVzdDAgFw0yMTEwMTkwMjE1MThaGA8yMTIx 
+MDkyNTAyMTUxOFowFTETMBEGA1UEAwwKdmF1bHQtdGVzdDCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAM6Aha5bUcWwuikIFZUAccJXYAC5oczybwN6VDHD +dDugikic/AT1AkaRYAhJiLWAD0XeOHghf1eCpeC+ZYcbZssXr7NZ/fUmR28ni1Nt +QRTcZ0bEBBYaEjEoNHenwAOsuBhJuawvfNI+Jns6ejoLIC3nUgQabWp4gPbfrC5y +WWrPRzXZEMp9hFppxHRIHzW3yqxlOXO2hUC5UtVElYkh0ojBSCVXe8iFfWTBnZrc +uTIm85y1x+k1kD7oyWxTZiPaUtTd9UM6pG1iYq5Tfbzx7rx5ntiTMwSZL4Y3lUWv +kQ7WCXPV7L0p7NpRyIQQH6McBZiuatv8kEGRKqDtNhF5kDECAwEAAaNTMFEwHQYD +VR0OBBYEFL04vjdsoUCPoZ64btbGam88REPQMB8GA1UdIwQYMBaAFL04vjdsoUCP +oZ64btbGam88REPQMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEB +AMaL/01nXbYzlzy6Vrn6R6wcHAsrSWC1syvQTaJzHQFmp3bxs7XUu3ZQdRbppxCK +INDXtrzupxpsBV2qSdwrPsn79umkSb51NFtHz7cMcmK18cuayviPpEPNN3IZuznU +3MFNd+sKoxX2udp12tGwjkerRrgPcG44aWch18QQ7H/jPld51uaujRs6fsA4lUNF +E2uPdiUNridbnxIw7bUgz9Vcq5d+XzHFvaF5QGp/mqmY6tsp/rMJCOME/dpBh27/ +Y/xh1qG92gSeT57IPNvd7M/yvKmcG2lRVv2rwm1oruYYaaHgqrai90sFDN+ttfVT +hqcx3qhE67dlFGLN03ico/8= +-----END CERTIFICATE----- diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/files/auth_cert_invalid.key b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/files/auth_cert_invalid.key new file mode 100644 index 000000000..70ee0b68b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/files/auth_cert_invalid.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDOgIWuW1HFsLop +CBWVAHHCV2AAuaHM8m8DelQxw3Q7oIpInPwE9QJGkWAISYi1gA9F3jh4IX9XgqXg +vmWHG2bLF6+zWf31JkdvJ4tTbUEU3GdGxAQWGhIxKDR3p8ADrLgYSbmsL3zSPiZ7 +Ono6CyAt51IEGm1qeID236wucllqz0c12RDKfYRaacR0SB81t8qsZTlztoVAuVLV +RJWJIdKIwUglV3vIhX1kwZ2a3LkyJvOctcfpNZA+6MlsU2Yj2lLU3fVDOqRtYmKu +U3288e68eZ7YkzMEmS+GN5VFr5EO1glz1ey9KezaUciEEB+jHAWYrmrb/JBBkSqg +7TYReZAxAgMBAAECggEAFAfJb8kx3GAxHovHNCXP4w8Ao/FdrXiSph62jgEshCAl +abe4hm6RovC1BPjFBv39/UP57EUrifyFRJCTXpCc3vRUOaxwAtiMs059qED/wBtE +9UtMLiynORdyR20BJhoZ3VlBZl9Q1z/qEafxdTHhFDLM9NLvbBV2RPwbQoIbBE4c +LDz02ISvvX/2TQvCzXV723XhAWPAmXDfK0sxONJ2adlp5LdNOBmwbGzHIymTVFFR +yJh9fk67hSi6KBbvVw+AuTXhaZ48hBTfD7aG2PF0knZRug4qlL840u4h+YD/PRb4 +FBs26XTzgX/fi4tTfR+fr43tbl/61JVOhV6PgmZqAQKBgQDxnP+aEQ6Lw4FuPFbf +ahwi/kJsng8GR7PfTLE9oOeCErmL2h8BdLqwoFNbUocrxzc7AnYQ2qLi6C+ord2j +huKcqvC4uV9EcBtm5PzszpsmvcYPdlMoJ2uUlpf4gNMw4QAQsdVkBl4dS1RmqfOF +nlB1Qr2fI13KxcTBLnIyF78eEQKBgQDazE9fY3mC859Vkuk90C/1cebb2D3JQ5+p +uyQOFzIrj4dIY9FtleuPG8T6pMRG0+VJdY0m2y25yWddrkR987d9HKdC6+O+7z9d +3K9Cdwudl+UgJDKRAjwpASSW8cN/tRinKqdLMaB7LG6zqjw2c6fPrJQDUpkqYQNW +Nf2oe3uwIQKBgGGUKpaqdNl1Z/yBWgR48kwfq+v6Gc9LfhWc/6gIJ/ecYFtfCE6q +OgvDdGbZecXwexnV8KD16/sAEt9o+K5md3cWgonYM+WL8UUpDg0FlBudYTQ1cXgy +E3y7JT0zqhd/h2s2pxvyhmgGWFvuH2/DdHMnNTXkLqkKHmB2xZ/9fy5BAoGAA4uo +BZ2xDGQyQsq8qNnEzS9pVEquxaIo5jE4JlpN39LVmGC1thbl2E3U5VLiM1ufrY8Q +GH8YwJf46tW2GmxmnspmE3mLGT77QeQfmu4e7BDYD6tGfn8QxoFUPHXyexFOMvdH ++rn1JcnUiWjKzP8WyxC63UrrXHpx+WWqlJ5bpMECgYEAjWUgQyAzep38gVSdwtIc +mrxOVUXHNPgts4DjIcu5Rbxy6E4CQ4jBiBdxNnVNEYgkyNPfKqjOtb4dEOEwO+XC +vz1ZCozUms0ALjbba6/zIHiowBOo8ORIngK4+zMx3fx+2wSfkeZ2kKOTFflvNOBk +QItFtv0Xwa/UhMWmnJ5uAW8= +-----END PRIVATE KEY----- diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/meta/main.yml new file mode 100644 index 000000000..151b31e18 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/meta/main.yml @@ -0,0 +1,5 @@ +--- +dependencies: + - setup_vault_test_plugins + - 
setup_cert_content + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/tasks/cert_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/tasks/cert_setup.yml new file mode 100644 index 000000000..581e78ee3 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/tasks/cert_setup.yml @@ -0,0 +1,31 @@ +--- +- name: "Setup block" + vars: + is_default_path: "{{ this_path == default_path }}" + block: + - name: "Enable the cert auth method" + vault_ci_enable_auth: + method_type: '{{ ansible_hashi_vault_auth_method }}' + path: "{{ omit if is_default_path else this_path }}" + config: + default_lease_ttl: 60m + + - name: Create a cert policy + vault_ci_policy_put: + name: cert-policy + policy: | + path "auth/{{ this_path }}/login" { + capabilities = [ "create", "read" ] + } + + - name: "Create a named role" + vault_ci_write: + path: "auth/{{ this_path }}/certs/vault_test" + data: + certificate: "{{ _auth_cert }}" + allowed_common_names: "{{ auth_cert_cn }}" + # in docs, this is token_policies (changed in Vault 1.2) + # use 'policies' to support older versions + policies: "{{ 'test-policy' if is_default_path else 'alt-policy' }},cert-policy" + vars: + _auth_cert: '{{ lookup("file", auth_cert_cert) }}' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/tasks/cert_test_controller.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/tasks/cert_test_controller.yml new file mode 100644 index 000000000..3b0eff304 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/tasks/cert_test_controller.yml @@ -0,0 +1,43 @@ +--- +- name: "Test block" + vars: + is_default_path: "{{ this_path == default_path }}" + kwargs_common: + ca_cert: "{{ vault_cert_file }}" + kwargs_mount: "{{ {} if is_default_path else {'mount_point': this_path} }}" + kwargs_cert: + cert_auth_public_key: "{{ auth_cert_cert }}" + cert_auth_private_key: "{{ auth_cert_key }}" + kwargs: "{{ kwargs_common | combine(kwargs_mount) | combine(kwargs_cert) }}" + block: + - name: Set the HTTPS connection address + set_fact: + ansible_hashi_vault_addr: "{{ vault_test_server_https }}" + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + set_fact: + response: "{{ lookup('vault_test_auth', **kwargs) }}" + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('test-policy' in response.login.auth.policies) | bool == is_default_path + - ('test-policy' not in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when erroneous credentials are used + vars: + kwargs_cert: + cert_auth_public_key: "{{ invalid_auth_cert_cert }}" + cert_auth_private_key: "{{ invalid_auth_cert_key }}" + set_fact: + response: "{{ lookup('vault_test_auth', want_exception=true, **kwargs) }}" + + - assert: + fail_msg: "An invalid cert somehow did not cause a failure." 
+ that: + - response is failed + - response.msg is search('invalid certificate or no client certificate supplied') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/tasks/cert_test_target.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/tasks/cert_test_target.yml new file mode 100644 index 000000000..897a2e1d3 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/tasks/cert_test_target.yml @@ -0,0 +1,40 @@ +--- +- name: "Test block" + vars: + is_default_path: "{{ this_path == default_path }}" + module_defaults: + vault_test_auth: + url: "{{ vault_test_server_https }}" + ca_cert: "{{ vault_cert_file }}" + auth_method: "{{ ansible_hashi_vault_auth_method }}" + mount_point: "{{ omit if is_default_path else this_path }}" + cert_auth_public_key: "{{ auth_cert_cert }}" + cert_auth_private_key: "{{ auth_cert_key }}" + block: + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + vault_test_auth: + register: response + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('test-policy' in response.login.auth.policies) | bool == is_default_path + - ('test-policy' not in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when erroneous credentials are used + vault_test_auth: + cert_auth_public_key: "{{ invalid_auth_cert_cert }}" + cert_auth_private_key: "{{ invalid_auth_cert_key }}" + want_exception: yes + register: response + + - assert: + fail_msg: "An invalid cert somehow did not cause a failure." 
+ that: + - response.inner is failed + - response.msg is search('invalid certificate or no client certificate supplied') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/tasks/main.yml new file mode 100644 index 000000000..7ac78b17f --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_cert/tasks/main.yml @@ -0,0 +1,57 @@ +--- +# task vars are not templated when used as vars, so we'll need to set_fact this evaluate the template +# see: https://github.com/ansible/ansible/issues/73268 +- name: Persist defaults + set_fact: + '{{ item.key }}': "{{ lookup('vars', item.key) }}" + loop: "{{ lookup('file', role_path ~ '/defaults/main.yml') | from_yaml | dict2items }}" + loop_control: + label: '{{ item.key }}' + +- name: Configuration tasks + module_defaults: + vault_ci_enable_auth: '{{ vault_plugins_module_defaults_common }}' + vault_ci_write: '{{ vault_plugins_module_defaults_common }}' + vault_ci_read: '{{ vault_plugins_module_defaults_common }}' + vault_ci_policy_put: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Canary for cert auth + vault_ci_read: + path: '{{ vault_cert_canary.path }}' + register: canary + + - name: Configure cert + when: canary.result is none + loop: '{{ auth_paths }}' + include_tasks: + file: cert_setup.yml + apply: + vars: + default_path: '{{ ansible_hashi_vault_auth_method }}' + this_path: '{{ item }}' + + - name: Write Canary + when: canary.result is none + vault_ci_write: + path: '{{ vault_cert_canary.path }}' + data: + value: '{{ vault_cert_canary.value }}' + + +- name: Run cert tests (controller) + loop: '{{ auth_paths }}' + include_tasks: + file: cert_test_controller.yml + apply: &test_apply + vars: + default_path: '{{ ansible_hashi_vault_auth_method }}' + this_path: '{{ item }}' + module_defaults: + assert: + quiet: yes + +- name: Run cert tests (target) + loop: '{{ auth_paths }}' + include_tasks: + file: cert_test_target.yml + apply: *test_apply diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/aliases new file mode 100644 index 000000000..eb10b1469 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/aliases @@ -0,0 +1,2 @@ +vault/auth/jwt +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/defaults/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/defaults/main.yml new file mode 100644 index 000000000..12763e969 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/defaults/main.yml @@ -0,0 +1,11 @@ +--- +ansible_hashi_vault_url: '{{ vault_test_server_http }}' +ansible_hashi_vault_auth_method: jwt + +auth_paths: + - jwt + - jwt-alt + +vault_jwt_canary: + path: cubbyhole/configure_jwt + value: complete # value does not matter diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/files/jwt_private.pem b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/files/jwt_private.pem new file mode 100644 index 000000000..61056a549 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/files/jwt_private.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- 
+MIIEogIBAAKCAQEAnzyis1ZjfNB0bBgKFMSvvkTtwlvBsaJq7S5wA+kzeVOVpVWw +kWdVha4s38XM/pa/yr47av7+z3VTmvDRyAHcaT92whREFpLv9cj5lTeJSibyr/Mr +m/YtjCZVWgaOYIhwrXwKLqPr/11inWsAkfIytvHWTxZYEcXLgAXFuUuaS3uF9gEi +NQwzGTU1v0FqkqTBr4B8nW3HCN47XUu0t8Y0e+lf4s4OxQawWD79J9/5d3Ry0vbV +3Am1FtGJiJvOwRsIfVChDpYStTcHTCMqtvWbV6L11BWkpzGXSW4Hv43qa+GSYOD2 +QU68Mb59oSk2OB+BtOLpJofmbGEGgvmwyCI9MwIDAQABAoIBACiARq2wkltjtcjs +kFvZ7w1JAORHbEufEO1Eu27zOIlqbgyAcAl7q+/1bip4Z/x1IVES84/yTaM8p0go +amMhvgry/mS8vNi1BN2SAZEnb/7xSxbflb70bX9RHLJqKnp5GZe2jexw+wyXlwaM ++bclUCrh9e1ltH7IvUrRrQnFJfh+is1fRon9Co9Li0GwoN0x0byrrngU8Ak3Y6D9 +D8GjQA4Elm94ST3izJv8iCOLSDBmzsPsXfcCUZfmTfZ5DbUDMbMxRnSo3nQeoKGC +0Lj9FkWcfmLcpGlSXTO+Ww1L7EGq+PT3NtRae1FZPwjddQ1/4V905kyQFLamAA5Y +lSpE2wkCgYEAy1OPLQcZt4NQnQzPz2SBJqQN2P5u3vXl+zNVKP8w4eBv0vWuJJF+ +hkGNnSxXQrTkvDOIUddSKOzHHgSg4nY6K02ecyT0PPm/UZvtRpWrnBjcEVtHEJNp +bU9pLD5iZ0J9sbzPU/LxPmuAP2Bs8JmTn6aFRspFrP7W0s1Nmk2jsm0CgYEAyH0X ++jpoqxj4efZfkUrg5GbSEhf+dZglf0tTOA5bVg8IYwtmNk/pniLG/zI7c+GlTc9B +BwfMr59EzBq/eFMI7+LgXaVUsM/sS4Ry+yeK6SJx/otIMWtDfqxsLD8CPMCRvecC +2Pip4uSgrl0MOebl9XKp57GoaUWRWRHqwV4Y6h8CgYAZhI4mh4qZtnhKjY4TKDjx +QYufXSdLAi9v3FxmvchDwOgn4L+PRVdMwDNms2bsL0m5uPn104EzM6w1vzz1zwKz +5pTpPI0OjgWN13Tq8+PKvm/4Ga2MjgOgPWQkslulO/oMcXbPwWC3hcRdr9tcQtn9 +Imf9n2spL/6EDFId+Hp/7QKBgAqlWdiXsWckdE1Fn91/NGHsc8syKvjjk1onDcw0 +NvVi5vcba9oGdElJX3e9mxqUKMrw7msJJv1MX8LWyMQC5L6YNYHDfbPF1q5L4i8j +8mRex97UVokJQRRA452V2vCO6S5ETgpnad36de3MUxHgCOX3qL382Qx9/THVmbma +3YfRAoGAUxL/Eu5yvMK8SAt/dJK6FedngcM3JEFNplmtLYVLWhkIlNRGDwkg3I5K +y18Ae9n7dHVueyslrb6weq7dTkYDi3iOYRW8HRkIQh06wEdbxt0shTzAJvvCQfrB +jg/3747WSsf/zBTcHihTRBdAv6OmdhV4/dD5YBfLAkLrd+mX7iE= +-----END RSA PRIVATE KEY----- diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/files/jwt_public.pem b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/files/jwt_public.pem new file mode 100644 index 000000000..12301e011 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/files/jwt_public.pem @@ -0,0 +1,9 @@ +-----BEGIN PUBLIC KEY----- +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAnzyis1ZjfNB0bBgKFMSv +vkTtwlvBsaJq7S5wA+kzeVOVpVWwkWdVha4s38XM/pa/yr47av7+z3VTmvDRyAHc +aT92whREFpLv9cj5lTeJSibyr/Mrm/YtjCZVWgaOYIhwrXwKLqPr/11inWsAkfIy +tvHWTxZYEcXLgAXFuUuaS3uF9gEiNQwzGTU1v0FqkqTBr4B8nW3HCN47XUu0t8Y0 +e+lf4s4OxQawWD79J9/5d3Ry0vbV3Am1FtGJiJvOwRsIfVChDpYStTcHTCMqtvWb +V6L11BWkpzGXSW4Hv43qa+GSYOD2QU68Mb59oSk2OB+BtOLpJofmbGEGgvmwyCI9 +MwIDAQAB +-----END PUBLIC KEY----- diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/files/token.jwt b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/files/token.jwt new file mode 100644 index 000000000..e38d1040b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/files/token.jwt @@ -0,0 +1 @@ +eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiJ0ZXN0Iiwic3ViIjoiaGFzaGlfdmF1bHRAdGVzdC5hbnNpYmxlLmNvbSIsIm5iZiI6MTYwNDgzNTEwMCwiZXhwIjozMjQ5OTA1MTM1OX0.NEWQR_Eicw8Fa9gU9HPY2M9Rp1czNTUKrICwKe7l1edaZNtgxhMGdyqnBsPrHL_dw1ZIwdvwVAioi8bEyIDEWICls0lzHwM169rrea3WEFrB5CP17A6DkvYL0cnOnGutbwUrXInPCRUfvRogIKEI-w8X-ris9LX2FBPKhXX1K3U0D8uYi5_9t8YWywTe0NkYvY-nTzMugK1MXMoBJ3fCksweJiDp6BOo3v9OU03MLgwgri2UdsqVb7WSk4XvWG-lmbiiSAWVf9BI3mecVDUHpYxbEqjv1HDG_wdX8zy1ZlAFbjp3kIpMlDVK1Q5nu_VPDzQrEvPdTnOzU36LE4UF-w diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/files/token_invalid.jwt 
b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/files/token_invalid.jwt new file mode 100644 index 000000000..aa608e6c4 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/files/token_invalid.jwt @@ -0,0 +1 @@ +eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiIxMjM0IiwidXNlcl9jbGFpbSI6InVzZXJfY2xhaW0iLCJuYmYiOjE2MDQ4MzUxMDAsImV4cCI6MzI0OTkwNTEzNTl9.etc2WSH7kR3fHFlVt4wlBYFKNn7Z4DQcRVXUK4gGF-Q diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/tasks/jwt_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/tasks/jwt_setup.yml new file mode 100644 index 000000000..c26aa61ad --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/tasks/jwt_setup.yml @@ -0,0 +1,32 @@ +- name: "Setup block" + vars: + is_default_path: "{{ this_path == default_path }}" + block: + - name: 'Enable the JWT auth method' + vault_ci_enable_auth: + method_type: jwt + path: '{{ omit if is_default_path else this_path }}' + config: + default_lease_ttl: 60m + + - name: 'Configure the JWT auth method' + vars: + jwt_public_key: '{{ lookup("file", "jwt_public.pem") }}' + vault_ci_write: + path: 'auth/{{ this_path }}/config' + data: + # in docs, this is token_policies (changed in Vault 1.2) + # use 'policies' to support older versions + policies: "{{ 'test-policy' if is_default_path else 'alt-policy' }},approle-policy" + jwt_validation_pubkeys: '{{ jwt_public_key }}' + + - name: 'Create a named role' + vault_ci_write: + path: 'auth/{{ this_path }}/role/test-role' + data: + # in docs, this is token_policies (changed in Vault 1.2) + # use 'policies' to support older versions + policies: "{{ 'test-policy' if is_default_path else 'alt-policy' }},approle-policy" + role_type: jwt + user_claim: sub + bound_audiences: test diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/tasks/jwt_test_controller.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/tasks/jwt_test_controller.yml new file mode 100644 index 000000000..fb20c8377 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/tasks/jwt_test_controller.yml @@ -0,0 +1,39 @@ +- name: "Test block" + vars: + jwt: '{{ lookup("file", "token.jwt") }}' + jwt_invalid: '{{ lookup("file", "token_invalid.jwt") }}' + is_default_path: "{{ this_path == default_path }}" + kwargs_common: + role_id: test-role + kwargs_mount: "{{ {} if is_default_path else {'mount_point': this_path} }}" + kwargs_jwt: + jwt: '{{ jwt }}' + kwargs: "{{ kwargs_common | combine(kwargs_mount) | combine(kwargs_jwt) }}" + block: + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + set_fact: + response: "{{ 
lookup('vault_test_auth', **kwargs) }}" + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('test-policy' in response.login.auth.policies) | bool == is_default_path + - ('test-policy' not in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when erroneous credentials are used + vars: + kwargs_jwt: + jwt: '{{ jwt_invalid }}' + set_fact: + response: "{{ lookup('vault_test_auth', want_exception=true, **kwargs) }}" + + - assert: + fail_msg: "An invalid JWT somehow did not cause a failure." + that: + - response is failed + - response.msg is search('no known key successfully validated the token signature') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/tasks/jwt_test_target.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/tasks/jwt_test_target.yml new file mode 100644 index 000000000..25110f8fe --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/tasks/jwt_test_target.yml @@ -0,0 +1,39 @@ +- name: "Test block" + vars: + jwt: '{{ lookup("file", "token.jwt") }}' + jwt_invalid: '{{ lookup("file", "token_invalid.jwt") }}' + is_default_path: "{{ this_path == default_path }}" + module_defaults: + vault_test_auth: + url: '{{ ansible_hashi_vault_url }}' + auth_method: '{{ ansible_hashi_vault_auth_method }}' + role_id: test-role + mount_point: '{{ omit if is_default_path else this_path }}' + jwt: '{{ jwt }}' + block: + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + register: response + vault_test_auth: + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('test-policy' in response.login.auth.policies) | bool == is_default_path + - ('test-policy' not in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when erroneous credentials are used + register: response + vault_test_auth: + jwt: '{{ jwt_invalid }}' + want_exception: yes + + - assert: + fail_msg: "An invalid JWT somehow did not cause a failure." 
+ that: + - response.inner is failed + - response.msg is search('no known key successfully validated the token signature') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/tasks/main.yml new file mode 100644 index 000000000..1bef30955 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_jwt/tasks/main.yml @@ -0,0 +1,56 @@ +--- +# task vars are not templated when used as vars, so we'll need to set_fact this evaluate the template +# see: https://github.com/ansible/ansible/issues/73268 +- name: Persist defaults + set_fact: + '{{ item.key }}': "{{ lookup('vars', item.key) }}" + loop: "{{ lookup('file', role_path ~ '/defaults/main.yml') | from_yaml | dict2items }}" + loop_control: + label: '{{ item.key }}' + +- name: Configuration tasks + module_defaults: + vault_ci_enable_auth: '{{ vault_plugins_module_defaults_common }}' + vault_ci_write: '{{ vault_plugins_module_defaults_common }}' + vault_ci_read: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Canary for JWT auth + vault_ci_read: + path: '{{ vault_jwt_canary.path }}' + register: canary + + - name: Configure JWT + when: canary.result is none + loop: '{{ auth_paths }}' + include_tasks: + file: jwt_setup.yml + apply: + vars: + default_path: '{{ ansible_hashi_vault_auth_method }}' + this_path: '{{ item }}' + + - name: Write Canary + when: canary.result is none + vault_ci_write: + path: '{{ vault_jwt_canary.path }}' + data: + value: '{{ vault_jwt_canary.value }}' + + +- name: Run JWT tests (controller) + loop: '{{ auth_paths }}' + include_tasks: + file: jwt_test_controller.yml + apply: &test_apply + vars: + default_path: '{{ ansible_hashi_vault_auth_method }}' + this_path: '{{ item }}' + module_defaults: + assert: + quiet: yes + +- name: Run JWT tests (target) + loop: '{{ auth_paths }}' + include_tasks: + file: jwt_test_target.yml + apply: *test_apply diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/aliases new file mode 100644 index 000000000..126af258d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/aliases @@ -0,0 +1,2 @@ +vault/auth/ldap +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/defaults/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/defaults/main.yml new file mode 100644 index 000000000..05f9efcd3 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/defaults/main.yml @@ -0,0 +1,10 @@ +--- +ansible_hashi_vault_url: '{{ vault_mmock_server_http }}' +ansible_hashi_vault_auth_method: ldap + +auth_paths: + - ldap + - ldap-alt + +ldap_username: ldapuser +ldap_password: ldappass diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git 
a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/tasks/ldap_test_controller.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/tasks/ldap_test_controller.yml new file mode 100644 index 000000000..78fc0a726 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/tasks/ldap_test_controller.yml @@ -0,0 +1,32 @@ +- name: "Test block" + vars: + is_default_path: "{{ this_path == default_path }}" + kwargs_mount: "{{ {} if is_default_path else {'mount_point': this_path} }}" + kwargs_common: + password: '{{ ldap_password }}' + kwargs: "{{ kwargs_common | combine(kwargs_mount) }}" + block: + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + set_fact: + response: "{{ lookup('vault_test_auth', username=ldap_username, **kwargs) }}" + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('ldap-sample-policy' in response.login.auth.policies) | bool == is_default_path + - ('ldap-sample-policy' not in response.login.auth.policies) | bool != is_default_path + - ('ldap-alt-sample-policy' in response.login.auth.policies) | bool != is_default_path + - ('ldap-alt-sample-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when something goes wrong (simulated) + set_fact: + response: "{{ lookup('vault_test_auth', username='fail-me-username', want_exception=true, **kwargs) }}" + + - assert: + fail_msg: "An invalid request somehow did not cause a failure." 
+ that: + - response is failed + - "response.msg is search('ldap operation failed: failed to bind as user')" diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/tasks/ldap_test_target.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/tasks/ldap_test_target.yml new file mode 100644 index 000000000..663f9065e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/tasks/ldap_test_target.yml @@ -0,0 +1,37 @@ +- name: "Test block" + vars: + is_default_path: "{{ this_path == default_path }}" + module_defaults: + vault_test_auth: + url: '{{ ansible_hashi_vault_url }}' + auth_method: '{{ ansible_hashi_vault_auth_method }}' + mount_point: '{{ omit if is_default_path else this_path }}' + username: '{{ ldap_username }}' + password: '{{ ldap_password }}' + block: + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + register: response + vault_test_auth: + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('ldap-sample-policy' in response.login.auth.policies) | bool == is_default_path + - ('ldap-sample-policy' not in response.login.auth.policies) | bool != is_default_path + - ('ldap-alt-sample-policy' in response.login.auth.policies) | bool != is_default_path + - ('ldap-alt-sample-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when something goes wrong (simulated) + register: response + vault_test_auth: + username: fail-me-username + want_exception: yes + + - assert: + fail_msg: "An invalid request somehow did not cause a failure." 
+ that: + - response.inner is failed + - "response.msg is search('ldap operation failed: failed to bind as user')" diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/tasks/main.yml new file mode 100644 index 000000000..1e561648e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_ldap/tasks/main.yml @@ -0,0 +1,23 @@ +--- +# task vars are not templated when used as vars, so we'll need to set_fact this evaluate the template +# see: https://github.com/ansible/ansible/issues/73268 +- name: Persist defaults + set_fact: + '{{ item.key }}': "{{ lookup('vars', item.key) }}" + loop: "{{ lookup('file', role_path ~ '/defaults/main.yml') | from_yaml | dict2items }}" + loop_control: + label: '{{ item.key }}' + +# there's no setup for this auth method because its API is mocked + +- name: Run ldap tests + loop: '{{ auth_paths | product(["target", "controller"]) | list }}' + include_tasks: + file: ldap_test_{{ item[1] }}.yml + apply: + vars: + default_path: ldap + this_path: '{{ item[0] }}' + module_defaults: + assert: + quiet: yes diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/aliases new file mode 100644 index 000000000..b6f32f9d7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/aliases @@ -0,0 +1,2 @@ +vault/auth/none +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/defaults/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/defaults/main.yml new file mode 100644 index 000000000..d1e5ae61b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/defaults/main.yml @@ -0,0 +1,3 @@ +--- +ansible_hashi_vault_url: '{{ vault_test_server_http }}' +ansible_hashi_vault_auth_method: none diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/meta/main.yml new file mode 100644 index 000000000..290705e5e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - setup_vault_test_plugins diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/tasks/controller.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/tasks/controller.yml new file mode 100644 index 000000000..2c415d4b3 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/tasks/controller.yml @@ -0,0 +1,19 @@ +--- +# task vars are not templated when used as vars, so we'll need to set_fact this evaluate the template +# see: https://github.com/ansible/ansible/issues/73268 +- name: Persist defaults + set_fact: + '{{ item.key }}': "{{ lookup('vars', item.key) }}" + loop: "{{ lookup('file', role_path ~ '/defaults/main.yml') | from_yaml | dict2items }}" + loop_control: + label: '{{ item.key }}' + +# TODO: consider setting up a Vault agent in CI to provide a better test of the none method +- name: "Perform a login with 'none' auth type" + set_fact: + status: "{{ lookup('vault_test_auth') }}" + +- name: "Assert no login information is returned" + 
assert: + that: + - status.login == None diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/tasks/main.yml new file mode 100644 index 000000000..9ad1914f4 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: target.yml +- import_tasks: controller.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/tasks/target.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/tasks/target.yml new file mode 100644 index 000000000..5803f1e91 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_none/tasks/target.yml @@ -0,0 +1,12 @@ +--- +# TODO: consider setting up a Vault agent in CI to provide a better test of the none method +- name: "Perform a login with 'none' auth type" + register: status + vault_test_auth: + url: '{{ ansible_hashi_vault_url }}' + auth_method: '{{ ansible_hashi_vault_auth_method }}' + +- name: "Assert no login information is returned" + assert: + that: + - status.login == None diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/aliases new file mode 100644 index 000000000..7595fd1d1 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/aliases @@ -0,0 +1,2 @@ +vault/auth/token +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/defaults/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/defaults/main.yml new file mode 100644 index 000000000..e6e84a51d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/defaults/main.yml @@ -0,0 +1,3 @@ +--- +ansible_hashi_vault_url: '{{ vault_test_server_http }}' +ansible_hashi_vault_auth_method: token diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/tasks/main.yml new file mode 100644 index 000000000..768b8ac8c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/tasks/main.yml @@ -0,0 +1,34 @@ +--- +# task vars are not templated when used as vars, so we'll need to set_fact this evaluate the template +# see: https://github.com/ansible/ansible/issues/73268 +- name: Persist defaults + set_fact: + '{{ item.key }}': "{{ lookup('vars', item.key) }}" + loop: "{{ lookup('file', role_path ~ '/defaults/main.yml') | from_yaml | dict2items }}" + loop_control: + label: '{{ item.key }}' + +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: 'Create a test non-root token' + vault_ci_token_create: + 
policies: test-policy + register: user_token_cmd + + - name: 'Create a test non-root token with no default policy' + vault_ci_token_create: + policies: test-policy + no_default_policy: true + register: user_token_no_default_policy_cmd + +- import_tasks: token_test_target.yml + module_defaults: + assert: + quiet: yes + +- import_tasks: token_test_controller.yml + module_defaults: + assert: + quiet: yes diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/tasks/token_test_controller.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/tasks/token_test_controller.yml new file mode 100644 index 000000000..137c747ce --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/tasks/token_test_controller.yml @@ -0,0 +1,89 @@ +- name: Test token with no default policy (missing lookup-self) + vars: + user_token: '{{ user_token_no_default_policy_cmd.result.auth.client_token }}' + secret: "{{ vault_kv2_api_path ~ '/secret1' }}" + expected_secret_value: foo1 + # ansible_hashi_vault_auth_method: token + block: + # task vars are not templated when used as vars, so we'll need to set_fact this evaluate the template + # see: https://github.com/ansible/ansible/issues/73268 + - set_fact: + ansible_hashi_vault_token: '{{ user_token }}' + + - name: Authenticate with a 'no default policy' token (failure expected) + vars: + ansible_hashi_vault_token_validate: true + set_fact: + response: "{{ lookup('vault_test_auth', want_exception=true) }}" + + - assert: + that: + - response is failed + - response.msg is search('Invalid Vault Token') + + - name: Authenticate with 'no default policy' token - with no validation + set_fact: + response: "{{ lookup('vault_test_auth') }}" + + - assert: + that: response.login.auth.client_token == user_token + + # if we could lookup-self, we'd be able to inspect the policies on this token, + # but since we can't, let's use the token to retrieve a secret we expect it to have access to. + # TODO: consider that we can use the root token we have access to in CI to lookup this token. 
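The TODO above points at an alternative check: the root token available to CI could inspect the created token's policies directly through Vault's token-lookup endpoint, rather than inferring access by reading a secret. A minimal, hypothetical sketch of that idea (not taken from this patch; the root-token variable name is an assumption, and the endpoint is Vault's documented POST /v1/auth/token/lookup):

- name: Look up the test token with the CI root token (illustrative sketch, not part of the patch)
  ansible.builtin.uri:
    url: "{{ ansible_hashi_vault_url }}/v1/auth/token/lookup"
    method: POST
    headers:
      X-Vault-Token: "{{ vault_root_token }}"  # assumed variable holding the CI root token
    body_format: json
    body:
      token: "{{ response.login.auth.client_token }}"
  register: token_lookup

- name: Confirm the token really carries no default policy (illustrative)
  assert:
    that:
      - "'default' not in token_lookup.json.data.policies"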
+ - name: Check that the token is usable + delegate_to: localhost + vault_ci_read: + url: '{{ ansible_hashi_vault_url }}' + token: '{{ response.login.auth.client_token }}' + path: '{{ secret }}' + register: secret_data + + - assert: + that: secret_data.result.data.data.value == expected_secret_value + + - name: Authenticate with an invalid token - with no validation + set_fact: + response: "{{ lookup('vault_test_auth', token='fake', token_validate=false) }}" + + - assert: + that: response.login.auth.client_token == 'fake' + + - name: Try to use the invalid token + delegate_to: localhost + vault_ci_read: + url: '{{ ansible_hashi_vault_url }}' + token: '{{ response.login.auth.client_token }}' + path: '{{ secret }}' + register: secret_data + ignore_errors: yes + + - assert: + that: + - secret_data is failed + - secret_data.msg is search('permission denied') + +- name: Normal token tests + vars: + user_token: '{{ user_token_cmd.result.auth.client_token }}' + expected_policy: test-policy + ansible_hashi_vault_token_validate: true + block: + - name: Authenticate with a token (with validation) + set_fact: + response: "{{ lookup('vault_test_auth', token=user_token) }}" + + - assert: + that: + - response.login.auth.client_token == user_token + - expected_policy in response.login.data.policies + - expected_policy in response.login.auth.policies + + - name: Authenticate with an invalid token (wuth validation) + set_fact: + response: "{{ lookup('vault_test_auth', token='fake', want_exception=true) }}" + + - assert: + that: + - response is failed + - response.msg is search('Invalid Vault Token') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/tasks/token_test_target.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/tasks/token_test_target.yml new file mode 100644 index 000000000..cbfa30f70 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_token/tasks/token_test_target.yml @@ -0,0 +1,94 @@ +- name: Test token with no default policy (missing lookup-self) + vars: + user_token: '{{ user_token_no_default_policy_cmd.result.auth.client_token }}' + secret: "{{ vault_kv2_api_path ~ '/secret1' }}" + expected_secret_value: foo1 + module_defaults: + vault_test_auth: + url: '{{ ansible_hashi_vault_url }}' + token: '{{ user_token }}' + block: + - name: Authenticate with a 'no default policy' token (failure expected) + register: response + vault_test_auth: + token_validate: true + want_exception: true + + - assert: + that: + - response.inner is failed + - response.msg is search('Invalid Vault Token') + + - name: Authenticate with 'no default policy' token - with no validation + register: response + vault_test_auth: + + - assert: + that: response.login.auth.client_token == user_token + + # if we could lookup-self, we'd be able to inspect the policies on this token, + # but since we can't, let's use the token to retrieve a secret we expect it to have access to. + # TODO: consider that we can use the root token we have access to in CI to lookup this token. 
+ - name: Check that the token is usable + vault_ci_read: + url: '{{ ansible_hashi_vault_url }}' + token: '{{ response.login.auth.client_token }}' + path: '{{ secret }}' + register: secret_data + + - assert: + that: secret_data.result.data.data.value == expected_secret_value + + - name: Authenticate with an invalid token - with no validation + register: response + vault_test_auth: + token: fake + token_validate: false + + - assert: + that: response.login.auth.client_token == 'fake' + + - name: Try to use the invalid token + vault_ci_read: + url: '{{ ansible_hashi_vault_url }}' + token: '{{ response.login.auth.client_token }}' + path: '{{ secret }}' + register: secret_data + ignore_errors: yes + + - assert: + that: + - secret_data is failed + - secret_data.msg is search('permission denied') + +- name: Normal token tests + vars: + user_token: '{{ user_token_cmd.result.auth.client_token }}' + expected_policy: test-policy + module_defaults: + vault_test_auth: + url: '{{ ansible_hashi_vault_url }}' + block: + - name: Authenticate with a token (with validation) + register: response + vault_test_auth: + token: '{{ user_token }}' + token_validate: true + + - assert: + that: + - response.login.auth.client_token == user_token + - expected_policy in response.login.data.policies + - expected_policy in response.login.auth.policies + + - name: Authenticate with an invalid token (with validation) + register: response + vault_test_auth: + token: fake + token_validate: true + want_exception: true + + - assert: + that: + - response.inner is failed + - response.msg is search('Invalid Vault Token') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/aliases new file mode 100644 index 000000000..3cc449b46 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/aliases @@ -0,0 +1,2 @@ +vault/auth/userpass +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/defaults/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/defaults/main.yml new file mode 100644 index 000000000..7a4eb389b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/defaults/main.yml @@ -0,0 +1,14 @@ +--- +ansible_hashi_vault_url: '{{ vault_test_server_http }}' +ansible_hashi_vault_auth_method: userpass + +auth_paths: + - userpass + - userpass-alt + +userpass_username: testuser +userpass_password: testpass + +vault_userpass_canary: + path: cubbyhole/configure_userpass + value: complete # value does not matter diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/tasks/main.yml new file mode 100644 index 000000000..39d6373f4 --- /dev/null +++ 
b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/tasks/main.yml @@ -0,0 +1,50 @@ +--- +# task vars are not templated when used as vars, so we'll need to set_fact this evaluate the template +# see: https://github.com/ansible/ansible/issues/73268 +- name: Persist defaults + set_fact: + '{{ item.key }}': "{{ lookup('vars', item.key) }}" + loop: "{{ lookup('file', role_path ~ '/defaults/main.yml') | from_yaml | dict2items }}" + loop_control: + label: '{{ item.key }}' + +- name: Configuration tasks + module_defaults: + vault_ci_enable_auth: '{{ vault_plugins_module_defaults_common }}' + vault_ci_policy_put: '{{ vault_plugins_module_defaults_common }}' + vault_ci_write: '{{ vault_plugins_module_defaults_common }}' + vault_ci_read: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Canary for userpass auth + vault_ci_read: + path: '{{ vault_userpass_canary.path }}' + register: canary + + - name: Configure userpass + when: canary.result is none + loop: '{{ auth_paths }}' + include_tasks: + file: userpass_setup.yml + apply: + vars: + default_path: '{{ ansible_hashi_vault_auth_method }}' + this_path: '{{ item }}' + + - name: Write Canary + when: canary.result is none + vault_ci_write: + path: '{{ vault_userpass_canary.path }}' + data: + value: '{{ vault_userpass_canary.value }}' + + - name: Run userpass tests + loop: '{{ auth_paths | product(["target", "controller"]) | list }}' + include_tasks: + file: userpass_test_{{ item[1] }}.yml + apply: + vars: + default_path: '{{ ansible_hashi_vault_auth_method }}' + this_path: '{{ item[0] }}' + module_defaults: + assert: + quiet: yes diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/tasks/userpass_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/tasks/userpass_setup.yml new file mode 100644 index 000000000..067a21967 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/tasks/userpass_setup.yml @@ -0,0 +1,27 @@ +- name: "Setup block" + vars: + is_default_path: "{{ this_path == default_path }}" + block: + - name: 'Enable the userpass auth method' + vault_ci_enable_auth: + method_type: userpass + path: '{{ omit if is_default_path else this_path }}' + config: + default_lease_ttl: 60m + + - name: 'Create a userpass policy' + vault_ci_policy_put: + name: userpass-policy + policy: | + path "auth/{{ this_path }}/login" { + capabilities = [ "create", "read" ] + } + + - name: 'Create a named role' + vault_ci_write: + path: 'auth/{{ this_path }}/users/{{ userpass_username }}' + data: + # in docs, this is token_policies (changed in Vault 1.2) + # use 'policies' to support older versions + policies: "{{ 'test-policy' if is_default_path else 'alt-policy' }},userpass-policy" + password: '{{ userpass_password }}' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/tasks/userpass_test_controller.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/tasks/userpass_test_controller.yml new file mode 100644 index 000000000..d1922a622 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/tasks/userpass_test_controller.yml @@ -0,0 +1,32 @@ +- name: "Test block" + vars: + is_default_path: "{{ this_path == default_path }}" + kwargs_mount: "{{ {} if is_default_path else {'mount_point': this_path} }}" + kwargs_common: + username: '{{ userpass_username }}' + kwargs: "{{ 
kwargs_common | combine(kwargs_mount) }}" + block: + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + set_fact: + response: "{{ lookup('vault_test_auth', password=userpass_password, **kwargs) }}" + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('test-policy' in response.login.auth.policies) | bool == is_default_path + - ('test-policy' not in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when erroneous credentials are used + set_fact: + response: "{{ lookup('vault_test_auth', password='fake', want_exception=true, **kwargs) }}" + + - assert: + fail_msg: "An invalid password somehow did not cause a failure." + that: + - response is failed + - response.msg is search('invalid username or password') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/tasks/userpass_test_target.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/tasks/userpass_test_target.yml new file mode 100644 index 000000000..a502ba573 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/auth_userpass/tasks/userpass_test_target.yml @@ -0,0 +1,37 @@ +- name: "Test block" + vars: + is_default_path: "{{ this_path == default_path }}" + module_defaults: + vault_test_auth: + url: '{{ ansible_hashi_vault_url }}' + auth_method: '{{ ansible_hashi_vault_auth_method }}' + mount_point: '{{ omit if is_default_path else this_path }}' + username: '{{ userpass_username }}' + password: '{{ userpass_password }}' + block: + # the purpose of this test is to catch when the plugin accepts mount_point but does not pass it into hvac + # we set the policy of the default mount to deny access to this secret and so we expect failure when the mount + # is default, and success when the mount is alternate + - name: Check auth mount differing result + register: response + vault_test_auth: + + - assert: + fail_msg: "A token from mount path '{{ this_path }}' had the wrong policy: {{ response.login.auth.policies }}" + that: + - ('test-policy' in response.login.auth.policies) | bool == is_default_path + - ('test-policy' not in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' in response.login.auth.policies) | bool != is_default_path + - ('alt-policy' not in response.login.auth.policies) | bool == is_default_path + + - name: Failure expected when erroneous credentials are used + register: response + vault_test_auth: + password: fake + want_exception: yes + + - assert: + fail_msg: "An invalid password somehow did not cause a failure." 
+ that: + - response.inner is failed + - response.msg is search('invalid username or password') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/aliases new file mode 100644 index 000000000..224c6be6d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/aliases @@ -0,0 +1,2 @@ +vault/connection +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/defaults/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/defaults/main.yml new file mode 100644 index 000000000..d8f2d8c6e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/defaults/main.yml @@ -0,0 +1,3 @@ +--- +# when False the tests requiring a valid SSL connection to Vault will be skipped +vault_run_https_tests: True diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/meta/main.yml new file mode 100644 index 000000000..2cc0486bd --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_cert_content diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/tasks/controller.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/tasks/controller.yml new file mode 100644 index 000000000..f9c44fefe --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/tasks/controller.yml @@ -0,0 +1,97 @@ +--- +- name: Connection Tests + module_defaults: + assert: + quiet: yes + vars: + # we don't set kwargs: {} here because of https://github.com/ansible/ansible/issues/75286 + test_cmd: &test + set_fact: + result: "{{ lookup('vault_test_connection', '', **kwargs) }}" + assert_cmd: &assert + assert: + that: result.status.initialized + block: + - name: HTTP connection + vars: + kwargs: {} + block: + - name: Set the HTTP connection address + set_fact: + ansible_hashi_vault_url: '{{ vault_test_server_http }}' + + - name: test HTTP + <<: *test + + - <<: *assert + + - name: test HTTP with proxy + <<: *test + vars: + kwargs: + url: '{{ vault_proxy_alt_vault_http | default(vault_test_server_http) }}' + proxies: '{{ vault_proxy_server }}' + + - <<: *assert + + - name: test HTTP with wrong proxy + <<: *test + vars: + vault_test_connection_want_exception: yes + ansible_hashi_vault_proxies: http://127.0.0.1:4567 + ansible_hashi_vault_retries: 2 + + - assert: + that: + - result is failed + - result.msg is search('Cannot connect to proxy') + - result.retries == 2 + + - name: HTTPS connection + when: vault_run_https_tests | bool + vars: + kwargs: &kwargs + ca_cert: '{{ vault_cert_file }}' + block: + - name: Set the HTTPS connection address + set_fact: + ansible_hashi_vault_addr: '{{ vault_test_server_https }}' + + - name: test HTTPS with no cert validation + <<: *test + vars: + ansible_hashi_vault_validate_certs: false + + - <<: *assert + + - name: test HTTPS with cert validation + <<: *test + vars: + vault_test_connection_want_args: yes + vault_test_connection_want_exception: yes + + - <<: *assert + 
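As an aside, the connection options exercised by these steps (ca_cert, validate_certs, proxies, retries) are the same shared options exposed on the collection's user-facing plugins. A minimal sketch of equivalent end-user usage, with placeholder addresses and paths rather than the test fixtures:

- name: Log in to Vault over HTTPS through a proxy (illustrative values only)
  community.hashi_vault.vault_login:
    url: https://vault.example.com:8200        # placeholder address
    ca_cert: /etc/ssl/certs/vault-ca.pem       # placeholder CA bundle path
    validate_certs: true
    proxies: http://proxy.example.com:3128     # placeholder proxy
    retries: 2
    auth_method: token
    token: "{{ my_vault_token }}"              # placeholder token variable
  register: login_data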
+ - name: test HTTPS with proxy & cert validation + <<: *test + vars: + kwargs: + <<: *kwargs + url: '{{ vault_proxy_alt_vault_https | default(vault_test_server_http) }}' + proxies: '{{ vault_proxy_server }}' + + - <<: *assert + + - name: test HTTPS with wrong proxy & cert validation + <<: *test + vars: + vault_test_connection_want_exception: yes + ansible_hashi_vault_validate_certs: yes + ansible_hashi_vault_proxies: http://127.0.0.1:4567 + ansible_hashi_vault_retries: 2 + + - assert: + that: + - result is failed + - result.msg is search('Cannot connect to proxy') + - result.retries == 2 diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/tasks/main.yml new file mode 100644 index 000000000..35a3c02f1 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- include_tasks: controller.yml +- include_tasks: target.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/tasks/target.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/tasks/target.yml new file mode 100644 index 000000000..7fb854b8d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/connection_options/tasks/target.yml @@ -0,0 +1,84 @@ +--- +- name: Connection Tests + module_defaults: + assert: + quiet: yes + vars: + assert_cmd: &assert + assert: + that: result.status.initialized + block: + - name: HTTP connection + module_defaults: + vault_test_connection: + url: '{{ vault_test_server_http }}' + block: + - name: test HTTP + register: result + vault_test_connection: + + - <<: *assert + + - name: test HTTP with proxy + register: result + vault_test_connection: + url: '{{ vault_proxy_alt_vault_http | default(vault_test_server_http) }}' + proxies: '{{ vault_proxy_server }}' + + - <<: *assert + + - name: test HTTP with wrong proxy + register: result + vault_test_connection: + want_exception: yes + proxies: http://127.0.0.1:4567 + retries: 2 + + - assert: + that: + - result.inner is failed + - result.msg is search('Cannot connect to proxy') + - result.retries == 2 + + - name: HTTPS connection + when: vault_run_https_tests | bool + module_defaults: + vault_test_connection: + url: '{{ vault_test_server_https }}' + ca_cert: '{{ vault_cert_file }}' + block: + - name: test HTTPS with no cert validation + register: result + vault_test_connection: + validate_certs: false + + - <<: *assert + + - name: test HTTPS with cert validation + register: result + vault_test_connection: + want_args: yes + + - <<: *assert + + - name: test HTTPS with proxy & cert validation + register: result + vault_test_connection: + url: '{{ vault_proxy_alt_vault_https | default(vault_test_server_http) }}' + proxies: '{{ vault_proxy_server }}' + + - <<: *assert + + - name: test HTTPS with wrong proxy & cert validation + register: result + vault_test_connection: + want_exception: yes + validate_certs: yes + proxies: http://127.0.0.1:4567 + retries: 2 + + - assert: + that: + - result.inner is failed + - result.msg is search('Cannot connect to proxy') + - result.retries == 2 diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/filter_vault_login_token/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/filter_vault_login_token/aliases new file mode 100644 
index 000000000..1af1cf90b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/filter_vault_login_token/aliases @@ -0,0 +1 @@ +context/controller diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/filter_vault_login_token/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/filter_vault_login_token/tasks/main.yml new file mode 100644 index 000000000..52e03a406 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/filter_vault_login_token/tasks/main.yml @@ -0,0 +1,17 @@ +--- +- vars: + token: token_value + login_result: + auth: + client_token: '{{ token }}' + module_login_result: + login: '{{ login_result }}' + alternate_field_result: + alt: '{{ login_result }}' + block: + - assert: + that: + - login_result | community.hashi_vault.vault_login_token == token + - login_result | community.hashi_vault.vault_login_token(optional_field='missing') == token + - module_login_result | community.hashi_vault.vault_login_token == token + - alternate_field_result | community.hashi_vault.vault_login_token(optional_field='alt') == token diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/aliases new file mode 100644 index 000000000..1bb8bf6d7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/aliases @@ -0,0 +1 @@ +# empty diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/tasks/lookup_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/tasks/lookup_setup.yml new file mode 100644 index 000000000..193d6fa5e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/tasks/lookup_setup.yml @@ -0,0 +1,9 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: 'Create a test non-root token' + vault_ci_token_create: + policies: test-policy + register: user_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/tasks/lookup_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/tasks/lookup_test.yml new file mode 100644 index 000000000..21306ee9f --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/tasks/lookup_test.yml @@ -0,0 +1,112 @@ +--- +- name: Var block + vars: + ansible_hashi_vault_token_validate: true + user_token: '{{ user_token_cmd.result.auth.client_token }}' + kwargs: + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + block: + - name: 'Check kv2 secret raw return value' + vars: + kv2_secret2_as_raw: "{{ lookup('community.hashi_vault.hashi_vault', vault_kv2_api_path ~ '/secret2 ', 
return_format='raw', **kwargs) }}" + assert: + that: + - "'data' in kv2_secret2_as_raw" + - "'data' in kv2_secret2_as_raw['data']" + - "'metadata' in kv2_secret2_as_raw['data']" + fail_msg: 'Raw return did not contain expected fields.' + + - name: "Check multiple secrets as dict (default)" + vars: + kv2_secrets_as_dict: "{{ lookup('community.hashi_vault.hashi_vault', vault_kv2_multi_api_path ~ '/secrets ', **kwargs) }}" + assert: + that: + - kv2_secrets_as_dict | type_debug == 'dict' + - kv2_secrets_as_dict['value' ~ item] == ('foo' ~ item) + fail_msg: 'Return value was not dict or items do not match.' + loop: [1, 2, 3] + + - name: "Check multiple secrets as dict (explicit)" + vars: + kv2_secrets_as_dict: "{{ lookup('community.hashi_vault.hashi_vault', vault_kv2_multi_api_path ~ '/secrets ', return_format='dict', **kwargs) }}" + assert: + that: + - kv2_secrets_as_dict | type_debug == 'dict' + - kv2_secrets_as_dict['value' ~ item] == ('foo' ~ item) + fail_msg: 'Return value was not dict or items do not match.' + loop: [1, 2, 3] + + - name: "Check multiple secrets as values" + vars: + kv2_secrets_as_values: "{{ query('community.hashi_vault.hashi_vault', vault_kv2_multi_api_path ~ '/secrets ', return_format='values', **kwargs) }}" + assert: + that: + - kv2_secrets_as_values | type_debug == 'list' + - ('foo' ~ item) in kv2_secrets_as_values + fail_msg: 'Return value was not list or items do not match.' + loop: [1, 2, 3] + + ### failure tests + + - name: 'Failure expected when erroneous credentials are used' + vars: + secret_wrong_cred: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=' ~ vault_kv2_api_path ~ '/secret2 token=wrong_token', **kwargs) }}" + debug: + msg: 'Failure is expected ({{ secret_wrong_cred }})' + register: test_wrong_cred + ignore_errors: true + + - assert: + that: + - test_wrong_cred is failed + - test_wrong_cred.msg is search('Invalid Vault Token') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: 'Failure expected when unauthorized secret is read' + vars: + secret_unauthorized: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=' ~ vault_kv2_api_path ~ '/secret3', **kwargs) }}" + debug: + msg: 'Failure is expected ({{ secret_unauthorized }})' + register: test_unauthorized + ignore_errors: true + + - assert: + that: + - test_unauthorized is failed + - test_unauthorized.msg is search('Permission Denied') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: 'Failure expected when inexistent secret is read' + vars: + secret_inexistent: "{{ lookup('community.hashi_vault.hashi_vault', 'secret=' ~ vault_kv2_api_path ~ '/non_existent_secret', **kwargs) }}" + debug: + msg: 'Failure is expected ({{ secret_inexistent }})' + register: test_inexistent + ignore_errors: true + + - assert: + that: + - test_inexistent is failed + - test_inexistent.msg is search("doesn't seem to exist") + fail_msg: "Expected failure but got success or wrong failure message." 
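The reads above all go through the legacy hashi_vault lookup, which returns the kv2 payload either flattened or raw. For comparison only, a sketch of the same kind of read via the collection's dedicated vault_kv2_get lookup, reusing the url and token variables already defined in this block; the 'secret2' path and the lookup's default 'secret' mount are assumptions, since the kv2 mount actually used in CI comes from vault_kv2_api_path:

- name: Read a kv2 secret with the dedicated kv2 lookup (sketch, not part of the patch)
  vars:
    kv2_secret2: "{{ lookup('community.hashi_vault.vault_kv2_get', 'secret2', url=vault_test_server_http, auth_method='token', token=user_token) }}"
  debug:
    var: kv2_secret2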
+ + # TODO: v5.0.0 - uncomment below: https://github.com/ansible-collections/community.hashi_vault/pull/350 + # - name: Failure expected when duplicate terms are used in the term string + # vars: + # duplicate_terms: >- + # {{ + # lookup('community.hashi_vault.hashi_vault', + # vault_kv2_api_path ~ '/secrets secret=' ~ vault_kv2_api_path ~ '/secret2', + # **kwargs) + # }} + # ansible.builtin.debug: + # msg: 'Failure is expected ({{ duplicate_terms }})' + # register: test_duplicate + # ignore_errors: true + + # - assert: + # that: + # - test_duplicate is failed + # - test_duplicate.msg is search("^Duplicate key 'secret' in term string") diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/tasks/main.yml new file mode 100644 index 000000000..38f8a532d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_hashi_vault/tasks/main.yml @@ -0,0 +1,8 @@ +--- +#################################################################### +# WARNING: These are designed specifically for Ansible tests # +# and should not be used as examples of how to write Ansible roles # +#################################################################### + +- import_tasks: lookup_setup.yml +- import_tasks: lookup_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/aliases new file mode 100644 index 000000000..1bb8bf6d7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/aliases @@ -0,0 +1 @@ +# empty diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/tasks/lookup_vault_ansible_settings_configure.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/tasks/lookup_vault_ansible_settings_configure.yml new file mode 100644 index 000000000..b92955d5e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/tasks/lookup_vault_ansible_settings_configure.yml @@ -0,0 +1,9 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Create a test non-root token + vault_ci_token_create: + policies: test-policy + register: user_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/tasks/lookup_vault_ansible_settings_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/tasks/lookup_vault_ansible_settings_test.yml new file mode 100644 index 000000000..6d38943bd --- /dev/null +++ 
b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/tasks/lookup_vault_ansible_settings_test.yml @@ -0,0 +1,102 @@ +--- +- name: assert defaults + vars: + settings: null + module_defaults: + assert: + quiet: true + fail_msg: 'settings: {{ settings }}' + block: + - name: Call with no options + vars: + settings: "{{ lookup('community.hashi_vault.vault_ansible_settings') }}" + assert: + that: + - settings | type_debug == 'dict' + - settings | length == 1 + - "'timeout' in settings" + - settings.timeout == 5 + # ^ needs to match settings in tests/integration/integration.cfg + + - name: Call a non-default plugin (fqcn) + vars: + ansible_hashi_vault_engine_mount_point: canary + settings: "{{ lookup('community.hashi_vault.vault_ansible_settings', plugin='community.hashi_vault.vault_kv2_get') }}" + assert: + that: + - settings | type_debug == 'dict' + - "'engine_mount_point' in settings" + - settings.engine_mount_point == 'canary' + + - name: Call a non-default plugin (short) + vars: + ansible_hashi_vault_engine_mount_point: canary + settings: "{{ lookup('community.hashi_vault.vault_ansible_settings', plugin='vault_kv2_get') }}" + assert: + that: + - settings | type_debug == 'dict' + - "'engine_mount_point' in settings" + - settings.engine_mount_point == 'canary' + + - name: Include default + vars: + settings: "{{ lookup('community.hashi_vault.vault_ansible_settings', include_default=True) }}" + assert: + that: + - settings | type_debug == 'dict' + - "'auth_method' in settings" + - settings.auth_method == 'token' + + - name: Include private & none & default + vars: + settings: "{{ lookup('community.hashi_vault.vault_ansible_settings', include_default=True, include_private=True, include_none=True) }}" + assert: + that: + - settings | type_debug == 'dict' + - "'_terms' in settings" + - settings._terms == None + - settings | dict2items | selectattr('value', 'none') | list | length > 1 + + - name: Filters + vars: + settings: "{{ lookup('community.hashi_vault.vault_ansible_settings', '*', '!r*', 'retr[yi]*', '!*s', include_default=True, include_none=True) }}" + assert: + that: + - settings | type_debug == 'dict' + - "'retry_action' in settings" + - "'retries' not in settings" + - settings.retry_action == 'warn' + - settings | dict2items | selectattr('key', 'search', '^r.*') | list | length == 1 + - settings | length > 1 + + - name: No such plugin + register: err + set_fact: + settings: "{{ lookup('community.hashi_vault.vault_ansible_settings', plugin='not.real.plugin') }}" + ignore_errors: true + + - assert: + that: + - err is failed + - > + "'not.real.plugin' plugin not found" in err.msg + + - name: Set templated variables for testing + set_fact: + ansible_hashi_vault_url: '{{ vault_test_server_http }}' + ansible_hashi_vault_token: '{{ user_token_cmd.result.auth.client_token }}' + + - name: Perform a login without config (failure expected) + register: err + community.hashi_vault.vault_login: + url: http://nothing + failed_when: err is not failed or 'No Vault Token specified or discovered' not in err.msg + + - name: Set defaults + vars: + ansible_hashi_vault_auth_method: token + module_defaults: + community.hashi_vault.vault_login: "{{ lookup('community.hashi_vault.vault_ansible_settings') }}" + block: + - name: Perform a login with defaulted config + community.hashi_vault.vault_login: diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/tasks/main.yml 
b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/tasks/main.yml new file mode 100644 index 000000000..c0e528be4 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_ansible_settings/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: lookup_vault_ansible_settings_configure.yml +- import_tasks: lookup_vault_ansible_settings_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/aliases new file mode 100644 index 000000000..1bb8bf6d7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/aliases @@ -0,0 +1 @@ +# empty diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/tasks/lookup_vault_kv1_get_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/tasks/lookup_vault_kv1_get_setup.yml new file mode 100644 index 000000000..0907d3e01 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/tasks/lookup_vault_kv1_get_setup.yml @@ -0,0 +1,9 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: 'Create a test non-root token' + vault_ci_token_create: + policies: [test-policy, invalid-kv2] + register: user_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/tasks/lookup_vault_kv1_get_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/tasks/lookup_vault_kv1_get_test.yml new file mode 100644 index 000000000..ed317023d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/tasks/lookup_vault_kv1_get_test.yml @@ -0,0 +1,151 @@ +--- +- name: Var block + vars: + ansible_hashi_vault_token_validate: true + user_token: '{{ user_token_cmd.result.auth.client_token }}' + kwargs: + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + engine_mount_point: '{{ vault_kv1_mount_point }}' + block: + - name: Check kv1 secret read + vars: + kv1_secret2: "{{ lookup('community.hashi_vault.vault_kv1_get', vault_kv1_path ~ '/secret2', **kwargs) }}" + assert: + that: + - "'raw' in kv1_secret2" + - "'data' in kv1_secret2" + - "'metadata' in kv1_secret2" + - "'secret' in kv1_secret2" + - "'data' in kv1_secret2['raw']" + - kv1_secret2['data'] == kv1_secret2['raw']['data'] + - kv1_secret2['secret'] == kv1_secret2['data'] + - kv1_secret2['metadata'] | combine({'data':kv1_secret2['data']}) == kv1_secret2['raw'] + fail_msg: 'Return value did not contain expected fields.' 
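The raw/data/metadata/secret structure asserted in the kv1 tests above is what a consuming playbook would key into; a minimal sketch, where the mount point kv1, path app/config, URL, and token variable my_token are all hypothetical:

- name: Fetch a kv1 secret and use only its key/value data
  vars:
    app_secret: "{{ lookup('community.hashi_vault.vault_kv1_get', 'app/config',
                    engine_mount_point='kv1', url='https://vault.example.com:8200',
                    auth_method='token', token=my_token) }}"
  ansible.builtin.debug:
    # 'secret' holds only the key/value pairs; 'raw' keeps the full API response,
    # with the request metadata alongside 'data'.
    msg: "password is {{ app_secret.secret.password | default('unset') }}"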
+ + - name: Check multiple path read as array + vars: + paths: + - '{{ vault_kv1_path }}/secret2' + - '{{ vault_kv1_path }}/secret1' + kv1_secrets: "{{ lookup('community.hashi_vault.vault_kv1_get', *paths, **kwargs) }}" + assert: + that: + - kv1_secrets | type_debug == 'list' + - item | type_debug == 'dict' + - "'raw' in item" + - "'data' in item" + - "'metadata' in item" + - "'secret' in item" + - "'data' in item['raw']" + - item['data'] == item['raw']['data'] + - item['secret'] == item['data'] + - item['metadata'] | combine({'data':item['data']}) == item['raw'] + fail_msg: 'Return value was not correct type or items do not match.' + loop: '{{ kv1_secrets }}' + + + ### failure tests + + - name: Test defualt path value + vars: + default_path_access: "{{ lookup('community.hashi_vault.vault_kv1_get', vault_kv1_path ~ '/secret2', url=kwargs.url, token=kwargs.token) }}" + debug: + msg: 'Failure is expected ({{ default_path_access }})' + register: default_path + ignore_errors: true + + - assert: + that: + - default_path is failed + - default_path.msg is search('Permission Denied to path') + + - name: Failure expected when erroneous credentials are used + vars: + secret_wrong_cred: "{{ lookup('community.hashi_vault.vault_kv1_get', vault_kv1_path ~ '/secret2', token='wrong_token', url=kwargs.url) }}" + debug: + msg: 'Failure is expected ({{ secret_wrong_cred }})' + register: test_wrong_cred + ignore_errors: true + + - assert: + that: + - test_wrong_cred is failed + - test_wrong_cred.msg is search('Invalid Vault Token') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: Failure expected when unauthorized secret is read + vars: + secret_unauthorized: "{{ lookup('community.hashi_vault.vault_kv1_get', vault_kv1_path ~ '/secret3', **kwargs) }}" + debug: + msg: 'Failure is expected ({{ secret_unauthorized }})' + register: test_unauthorized + ignore_errors: true + + - assert: + that: + - test_unauthorized is failed + - test_unauthorized.msg is search('Permission Denied') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: Failure expected when inexistent secret is read + vars: + secret_inexistent: "{{ lookup('community.hashi_vault.vault_kv1_get', vault_kv1_path ~ '/non_existent_secret', **kwargs) }}" + debug: + msg: 'Failure is expected ({{ secret_inexistent }})' + register: test_inexistent + ignore_errors: true + + - assert: + that: + - test_inexistent is failed + - test_inexistent.msg is search("missing path") + fail_msg: "Expected failure but got success or wrong failure message." 
+ + - name: Specific failure message expected when trying to read kv2 + vars: + kv2_read: >- + {{ + lookup( + 'community.hashi_vault.vault_kv1_get', + vault_kv2_path ~ '/secret2', + engine_mount_point=vault_kv2_mount_point, + url=kwargs.url, token=kwargs.token + ) + }} + debug: + msg: 'Failure is expected ({{ kv2_read }})' + register: test_kv_mismatch + ignore_errors: true + + - assert: + that: + - test_kv_mismatch is failed + - test_kv_mismatch.msg is search('If this is a KV version 2 path, use community.hashi_vault.vault_kv2_get') + + + # do this last so our set_fact doesn't affect any other tests + - name: Set the vars that will configure the lookup settings we can't set via with_ + set_fact: + ansible_hashi_vault_url: '{{ kwargs.url }}' + ansible_hashi_vault_token: '{{ kwargs.token }}' + ansible_hashi_vault_auth_method: '{{ kwargs.auth_method }}' + ansible_hashi_vault_engine_mount_point: '{{ kwargs.engine_mount_point }}' + + - name: Check multiple path read via with_ + assert: + that: + - item | type_debug == 'dict' + - "'raw' in item" + - "'data' in item" + - "'metadata' in item" + - "'secret' in item" + - "'data' in item['raw']" + - item['data'] == item['raw']['data'] + - item['secret'] == item['data'] + - item['metadata'] | combine({'data':item['data']}) == item['raw'] + fail_msg: 'Return value was not correct type or items do not match.' + with_community.hashi_vault.vault_kv1_get: + - '{{ vault_kv1_path }}/secret2' + - '{{ vault_kv1_path }}/secret1' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/tasks/main.yml new file mode 100644 index 000000000..eeb76ac53 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv1_get/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: lookup_vault_kv1_get_setup.yml +- import_tasks: lookup_vault_kv1_get_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/aliases new file mode 100644 index 000000000..1bb8bf6d7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/aliases @@ -0,0 +1 @@ +# empty diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/tasks/lookup_vault_kv2_get_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/tasks/lookup_vault_kv2_get_setup.yml new file mode 100644 index 000000000..4c598d06c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/tasks/lookup_vault_kv2_get_setup.yml @@ -0,0 +1,9 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: 'Create a test non-root token' + vault_ci_token_create: + 
policies: [test-policy] + register: user_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/tasks/lookup_vault_kv2_get_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/tasks/lookup_vault_kv2_get_test.yml new file mode 100644 index 000000000..b335290ec --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/tasks/lookup_vault_kv2_get_test.yml @@ -0,0 +1,177 @@ +--- +- name: Var block + vars: + ansible_hashi_vault_token_validate: true + user_token: '{{ user_token_cmd.result.auth.client_token }}' + kwargs: + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + engine_mount_point: '{{ vault_kv2_mount_point }}' + block: + - name: Check kv2 secret read + vars: + kv2_result: "{{ lookup('community.hashi_vault.vault_kv2_get', vault_kv2_path ~ '/secret2', **kwargs) }}" + assert: + that: &assert_basics + - "'raw' in kv2_result" + - "'data' in kv2_result" + - "'metadata' in kv2_result" + - "'secret' in kv2_result" + - "'data' in kv2_result['raw']" + - kv2_result['data'] == kv2_result['raw']['data'] + - kv2_result['secret'] == kv2_result['data']['data'] + - kv2_result['metadata'] == kv2_result['data']['metadata'] + fail_msg: 'Return value did not contain expected fields.' + + - name: Check kv2 versioned (latest) + set_fact: + kv2_result: "{{ lookup('community.hashi_vault.vault_kv2_get', vault_kv2_versioned_path ~ '/twover', **kwargs) }}" + + - assert: + that: *assert_basics + + - name: Check version-specific expected values (latest) + vars: &expected_ver2 + expected: + version: 2 + a: two + v: 2 + assert: + that: &assert_expected + - kv2_result.metadata.version == expected.version + - kv2_result.secret.a == expected.a + - kv2_result.secret.v == expected.v + + - name: Check kv2 versioned (2) + set_fact: + kv2_result: "{{ lookup('community.hashi_vault.vault_kv2_get', vault_kv2_versioned_path ~ '/twover', version=2, **kwargs) }}" + + - assert: + that: *assert_basics + + - name: Check version-specific expected values (2) + vars: *expected_ver2 + assert: + that: *assert_expected + + - name: Check kv2 versioned (1) + set_fact: + kv2_result: "{{ lookup('community.hashi_vault.vault_kv2_get', vault_kv2_versioned_path ~ '/twover', version=1, **kwargs) }}" + + - assert: + that: *assert_basics + + - name: Check version-specific expected values (1) + vars: + expected: + version: 1 + a: one + v: 1 + assert: + that: *assert_expected + + - name: Check multiple path read as array + vars: + paths: + - '{{ vault_kv2_path }}/secret2' + - '{{ vault_kv2_path }}/secret1' + kv2_secrets: "{{ lookup('community.hashi_vault.vault_kv2_get', *paths, **kwargs) }}" + assert: + that: + - kv2_secrets | type_debug == 'list' + - item | type_debug == 'dict' + - "'raw' in item" + - "'data' in item" + - "'metadata' in item" + - "'secret' in item" + - "'data' in item['raw']" + - item['data'] == item['raw']['data'] + - item['secret'] == item['data']['data'] + - item['metadata'] == item['data']['metadata'] + fail_msg: 'Return value was not correct type or items do not match.' 
+ loop: '{{ kv2_secrets }}' + + + ### failure tests + + - name: Test defualt path value + vars: + default_path_access: "{{ lookup('community.hashi_vault.vault_kv2_get', vault_kv2_path ~ '/secret2', url=kwargs.url, token=kwargs.token) }}" + debug: + msg: 'Failure is expected ({{ default_path_access }})' + register: default_path + ignore_errors: true + + - assert: + that: + - default_path is failed + - default_path.msg is search('Permission Denied to path') + + - name: Failure expected when erroneous credentials are used + vars: + secret_wrong_cred: "{{ lookup('community.hashi_vault.vault_kv2_get', vault_kv2_path ~ '/secret2', token='wrong_token', url=kwargs.url) }}" + debug: + msg: 'Failure is expected ({{ secret_wrong_cred }})' + register: test_wrong_cred + ignore_errors: true + + - assert: + that: + - test_wrong_cred is failed + - test_wrong_cred.msg is search('Invalid Vault Token') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: Failure expected when unauthorized secret is read + vars: + secret_unauthorized: "{{ lookup('community.hashi_vault.vault_kv2_get', vault_kv2_path ~ '/secret3', **kwargs) }}" + debug: + msg: 'Failure is expected ({{ secret_unauthorized }})' + register: test_unauthorized + ignore_errors: true + + - assert: + that: + - test_unauthorized is failed + - test_unauthorized.msg is search('Permission Denied') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: Failure expected when inexistent secret is read + vars: + secret_inexistent: "{{ lookup('community.hashi_vault.vault_kv2_get', vault_kv2_path ~ '/non_existent_secret', **kwargs) }}" + debug: + msg: 'Failure is expected ({{ secret_inexistent }})' + register: test_inexistent + ignore_errors: true + + - assert: + that: + - test_inexistent is failed + - test_inexistent.msg is search("missing path") + fail_msg: "Expected failure but got success or wrong failure message." + + + # do this last so our set_fact doesn't affect any other tests + - name: Set the vars that will configure the lookup settings we can't set via with_ + set_fact: + ansible_hashi_vault_url: '{{ kwargs.url }}' + ansible_hashi_vault_token: '{{ kwargs.token }}' + ansible_hashi_vault_auth_method: '{{ kwargs.auth_method }}' + ansible_hashi_vault_engine_mount_point: '{{ kwargs.engine_mount_point }}' + + - name: Check multiple path read via with_ + assert: + that: + - item | type_debug == 'dict' + - "'raw' in item" + - "'data' in item" + - "'metadata' in item" + - "'secret' in item" + - "'data' in item['raw']" + - item['data'] == item['raw']['data'] + - item['secret'] == item['data']['data'] + - item['metadata'] == item['data']['metadata'] + fail_msg: 'Return value was not correct type or items do not match.' 
+ with_community.hashi_vault.vault_kv2_get: + - '{{ vault_kv2_path }}/secret2' + - '{{ vault_kv2_path }}/secret1' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/tasks/main.yml new file mode 100644 index 000000000..890bc0b62 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_kv2_get/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: lookup_vault_kv2_get_setup.yml +- import_tasks: lookup_vault_kv2_get_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/aliases new file mode 100644 index 000000000..1bb8bf6d7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/aliases @@ -0,0 +1 @@ +# empty diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/tasks/lookup_vault_list_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/tasks/lookup_vault_list_setup.yml new file mode 100644 index 000000000..193d6fa5e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/tasks/lookup_vault_list_setup.yml @@ -0,0 +1,9 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: 'Create a test non-root token' + vault_ci_token_create: + policies: test-policy + register: user_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/tasks/lookup_vault_list_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/tasks/lookup_vault_list_test.yml new file mode 100644 index 000000000..9c2190208 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/tasks/lookup_vault_list_test.yml @@ -0,0 +1,137 @@ +--- +- name: Var block + vars: + ansible_hashi_vault_token_validate: true + user_token: '{{ user_token_cmd.result.auth.client_token }}' + kwargs: + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + block: + - name: 'Check kv2 secret list' + vars: + kv2_secret2: "{{ lookup('community.hashi_vault.vault_list', vault_kv2_api_list_path, **kwargs) }}" + assert: + that: + - "'data' in kv2_secret2" + - "'keys' in kv2_secret2['data']" + fail_msg: 'Return value did not contain expected fields.' + + - name: 'Check kv2 mount point list' + vars: + kv2_mount_point: "{{ lookup('community.hashi_vault.vault_list', vault_kv2_api_list_mount_point, **kwargs) }}" + assert: + that: + - "'data' in kv2_mount_point" + - "'keys' in kv2_mount_point['data']" + fail_msg: 'Return value did not contain expected fields.' 
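The data['keys'] array asserted above is what callers normally iterate over; a small task sketch with a hypothetical kv2 metadata path, URL, and token variable my_token (bracket access is used so the key named 'keys' is not shadowed by the Jinja2 dict method):

- name: List kv2 secret names and print each one
  vars:
    listing: "{{ lookup('community.hashi_vault.vault_list', 'secret/metadata/app',
                 url='https://vault.example.com:8200', auth_method='token', token=my_token) }}"
  ansible.builtin.debug:
    msg: "found secret: {{ item }}"
  loop: "{{ listing['data']['keys'] }}"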
+ + - name: "Check multiple path list as array" + vars: + paths: + - '{{ vault_kv2_api_list_path }}' + - '{{ vault_policy_api_list_path }}' + list_results: "{{ lookup('community.hashi_vault.vault_list', *paths, **kwargs) }}" + assert: + that: + - list_results | type_debug == 'list' + - item | type_debug == 'dict' + - "'data' in item" + - "'keys' in item['data']" + - item['data']['keys'] | type_debug == 'list' + fail_msg: 'Return value was not correct type or items do not match.' + loop: '{{ list_results }}' + + + ### failure tests + + - name: 'Failure expected when erroneous credentials are used' + vars: + secret_wrong_cred: "{{ lookup('community.hashi_vault.vault_list', vault_kv2_api_list_path, token='wrong_token', url=kwargs.url) }}" + debug: + msg: 'Failure is expected ({{ secret_wrong_cred }})' + register: test_wrong_cred + ignore_errors: true + + - assert: + that: + - test_wrong_cred is failed + - test_wrong_cred.msg is search('Invalid Vault Token') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: 'Failure expected when unauthorized path is provided' + vars: + secret_unauthorized: "{{ lookup('community.hashi_vault.vault_list', unauthorized_vault_kv2_mount_point, **kwargs) }}" + debug: + msg: 'Failure is expected ({{ secret_unauthorized }})' + register: test_unauthorized + ignore_errors: true + + - assert: + that: + - test_unauthorized is failed + - test_unauthorized.msg is search('Permission Denied') + fail_msg: "Expected failure but got success or wrong failure message." + + # When an inexistent mount point is listed, the API returns a 403 error, not 404. + - name: 'Failure expected when inexistent mount point is listed' + vars: + mount_point_inexistent: "{{ lookup('community.hashi_vault.vault_list', vault_kv2_api_list_inexistent_mount_point, **kwargs) }}" + debug: + msg: 'Failure is expected ({{ mount_point_inexistent }})' + register: test_inexistent_mount_point + ignore_errors: true + + - assert: + that: + - test_inexistent_mount_point is failed + - test_inexistent_mount_point.msg is search("Permission Denied") + fail_msg: "Expected failure but got success or wrong failure message." + + - name: 'Failure expected when inexistent path is listed' + vars: + path_inexistent: "{{ lookup('community.hashi_vault.vault_list', vault_kv2_api_list_inexistent_path, **kwargs) }}" + debug: + msg: 'Failure is expected ({{ path_inexistent }})' + register: test_inexistent + ignore_errors: true + + - assert: + that: + - test_inexistent is failed + - test_inexistent.msg is search("doesn't seem to exist") + fail_msg: "Expected failure but got success or wrong failure message." + + # If an inexistent path is included in a policy statement that denies access, the list API returns a 403 error. + - name: 'Failure expected when inexistent path is listed but is explicitly mentioned in a policy statement' + vars: + path_inexistent_unauthorized: "{{ lookup('community.hashi_vault.vault_list', vault_kv2_api_list_inexistent_unauthorized_path, **kwargs) }}" + debug: + msg: 'Failure is expected ({{ path_inexistent_unauthorized }})' + register: test_inexistent_unauthorized + ignore_errors: true + + - assert: + that: + - test_inexistent_unauthorized is failed + - test_inexistent_unauthorized.msg is search("Permission Denied") + fail_msg: "Expected failure but got success or wrong failure message." 
+ + # do this last so our set_fact doesn't affect any other tests + - name: Set the vars that will configure the lookup settings we can't set via with_ + set_fact: + ansible_hashi_vault_url: '{{ kwargs.url }}' + ansible_hashi_vault_token: '{{ kwargs.token }}' + ansible_hashi_vault_auth_method: '{{ kwargs.auth_method }}' + + - name: Check multiple path list via with_ + assert: + that: + - item | type_debug == 'dict' + - "'data' in item" + - "'keys' in item['data']" + - item['data']['keys'] | type_debug == 'list' + fail_msg: 'Return value was not correct type or items do not match.' + with_community.hashi_vault.vault_list: + - '{{ vault_kv2_api_list_path }}' + - '{{ vault_policy_api_list_path }}' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/tasks/main.yml new file mode 100644 index 000000000..e0caae6bd --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_list/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: lookup_vault_list_setup.yml +- import_tasks: lookup_vault_list_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/aliases new file mode 100644 index 000000000..1bb8bf6d7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/aliases @@ -0,0 +1 @@ +# empty diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/tasks/lookup_vault_login_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/tasks/lookup_vault_login_setup.yml new file mode 100644 index 000000000..42ec2b1ca --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/tasks/lookup_vault_login_setup.yml @@ -0,0 +1,15 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Create a test non-root token + vault_ci_token_create: + policies: test-policy + register: user_token_cmd + + - name: Create a test non-root token with no default policy + vault_ci_token_create: + policies: test-policy + no_default_policy: true + register: user_token_no_default_policy_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/tasks/lookup_vault_login_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/tasks/lookup_vault_login_test.yml new file mode 100644 index 000000000..71f2fd162 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/tasks/lookup_vault_login_test.yml @@ -0,0 +1,69 @@ +--- +- name: Var block + vars: + user_token: '{{ user_token_cmd.result.auth.client_token }}' + 
ansible_hashi_vault_auth_method: token + kwargs: + url: '{{ vault_test_server_http }}' + token: '{{ user_token }}' + block: + - name: Login with token + vars: + result: "{{ lookup('community.hashi_vault.vault_login', **kwargs) }}" + assert: + that: + - "'auth' in result" + - "'client_token' in result.auth" + - result.auth.client_token == user_token + - "'policies' in result.auth" + - "'test-policy' in result.auth.policies" + - "'default' in result.auth.policies" + + - name: Login with token without validation + vars: + result: "{{ lookup('community.hashi_vault.vault_login', token_validate=False, **kwargs) }}" + assert: + that: + - "'auth' in result" + - "'client_token' in result.auth" + - result.auth.client_token == user_token + - "'policies' not in result.auth" + + # This won't actually fail if warnings aren't produced, but if the code crashes, we'll catch it here. + # Unit tests check for the correct warnings. + - name: Login with token (should produce warnings about term strings) + vars: + result: "{{ query('community.hashi_vault.vault_login', 'first-term', 'second-term', token_validate=False, **kwargs) }}" + assert: + that: + - result | count == 1 + fail_msg: 'vault_login should only ever return 1 result. Got: {{ result }}' + + ### failure tests + + - name: none auth method is not supported + vars: + result: "{{ query('community.hashi_vault.vault_login', auth_method='none', **kwargs) }}" + debug: + msg: "{{ result }} should have failed." + register: none_result + ignore_errors: true + + - assert: + that: + - none_result is failed + - none_result.msg is search("The 'none' auth method is not valid for this lookup.") + + - name: Try to lookup-self without permission + vars: + user_token: '{{ user_token_no_default_policy_cmd.result.auth.client_token }}' + result: "{{ lookup('community.hashi_vault.vault_login', **kwargs) }}" + debug: + msg: "{{ result }} should have failed." 
+ register: lookup_result + ignore_errors: true + + - assert: + that: + - lookup_result is failed + - lookup_result.msg is search('Invalid Vault Token Specified') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/tasks/main.yml new file mode 100644 index 000000000..f1275ad38 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_login/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: lookup_vault_login_setup.yml +- import_tasks: lookup_vault_login_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/aliases new file mode 100644 index 000000000..1bb8bf6d7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/aliases @@ -0,0 +1 @@ +# empty diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/tasks/lookup_vault_read_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/tasks/lookup_vault_read_setup.yml new file mode 100644 index 000000000..193d6fa5e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/tasks/lookup_vault_read_setup.yml @@ -0,0 +1,9 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: 'Create a test non-root token' + vault_ci_token_create: + policies: test-policy + register: user_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/tasks/lookup_vault_read_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/tasks/lookup_vault_read_test.yml new file mode 100644 index 000000000..111ccdcf8 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/tasks/lookup_vault_read_test.yml @@ -0,0 +1,99 @@ +--- +- name: Var block + vars: + ansible_hashi_vault_token_validate: true + user_token: '{{ user_token_cmd.result.auth.client_token }}' + kwargs: + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + block: + - name: 'Check kv2 secret read' + vars: + kv2_secret2: "{{ lookup('community.hashi_vault.vault_read', vault_kv2_api_path ~ '/secret2', **kwargs) }}" + assert: + that: + - "'data' in kv2_secret2" + - "'data' in kv2_secret2['data']" + - "'metadata' in kv2_secret2['data']" + fail_msg: 'Return value did not contain expected fields.' 
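As the assertion above reflects, vault_read returns the raw API response, so for a kv2 path the key/value pairs sit one level down under data.data (vault_kv2_get flattens this instead). A minimal sketch with a hypothetical path, URL, and token variable my_token:

- name: Read a kv2 secret through the generic vault_read lookup
  vars:
    raw_response: "{{ lookup('community.hashi_vault.vault_read', 'secret/data/hello',
                      url='https://vault.example.com:8200', auth_method='token', token=my_token) }}"
  ansible.builtin.debug:
    # For kv2 paths, 'data' wraps both the secret values and their version metadata.
    msg:
      values: "{{ raw_response.data.data }}"
      version: "{{ raw_response.data.metadata.version }}"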
+ + - name: "Check multiple path read as array" + vars: + paths: + - '{{ vault_kv2_api_path }}/secret2' + - '{{ vault_kv2_api_path }}/secret1' + kv2_secrets: "{{ lookup('community.hashi_vault.vault_read', *paths, **kwargs) }}" + assert: + that: + - kv2_secrets | type_debug == 'list' + - item | type_debug == 'dict' + - "'data' in item" + - "'data' in item['data']" + - "'value' in item['data']['data']" + fail_msg: 'Return value was not correct type or items do not match.' + loop: '{{ kv2_secrets }}' + + + ### failure tests + + - name: 'Failure expected when erroneous credentials are used' + vars: + secret_wrong_cred: "{{ lookup('community.hashi_vault.vault_read', vault_kv2_api_path ~ '/secret2', token='wrong_token', url=kwargs.url) }}" + debug: + msg: 'Failure is expected ({{ secret_wrong_cred }})' + register: test_wrong_cred + ignore_errors: true + + - assert: + that: + - test_wrong_cred is failed + - test_wrong_cred.msg is search('Invalid Vault Token') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: 'Failure expected when unauthorized secret is read' + vars: + secret_unauthorized: "{{ lookup('community.hashi_vault.vault_read', vault_kv2_api_path ~ '/secret3', **kwargs) }}" + debug: + msg: 'Failure is expected ({{ secret_unauthorized }})' + register: test_unauthorized + ignore_errors: true + + - assert: + that: + - test_unauthorized is failed + - test_unauthorized.msg is search('Permission Denied') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: 'Failure expected when inexistent secret is read' + vars: + secret_inexistent: "{{ lookup('community.hashi_vault.vault_read', vault_kv2_api_path ~ '/non_existent_secret', **kwargs) }}" + debug: + msg: 'Failure is expected ({{ secret_inexistent }})' + register: test_inexistent + ignore_errors: true + + - assert: + that: + - test_inexistent is failed + - test_inexistent.msg is search("doesn't seem to exist") + fail_msg: "Expected failure but got success or wrong failure message." + + # do this last so our set_fact doesn't affect any other tests + - name: Set the vars that will configure the lookup settings we can't set via with_ + set_fact: + ansible_hashi_vault_url: '{{ kwargs.url }}' + ansible_hashi_vault_token: '{{ kwargs.token }}' + ansible_hashi_vault_auth_method: '{{ kwargs.auth_method }}' + + - name: Check multiple path read via with_ + assert: + that: + - item | type_debug == 'dict' + - "'data' in item" + - "'data' in item['data']" + - "'value' in item['data']['data']" + fail_msg: 'Return value was not correct type or items do not match.' 
+ with_community.hashi_vault.vault_read: + - '{{ vault_kv2_api_path }}/secret2' + - '{{ vault_kv2_api_path }}/secret1' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/tasks/main.yml new file mode 100644 index 000000000..3bec16d7c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_read/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: lookup_vault_read_setup.yml +- import_tasks: lookup_vault_read_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/aliases new file mode 100644 index 000000000..1bb8bf6d7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/aliases @@ -0,0 +1 @@ +# empty diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/tasks/lookup_vault_token_create_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/tasks/lookup_vault_token_create_setup.yml new file mode 100644 index 000000000..b75bd313c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/tasks/lookup_vault_token_create_setup.yml @@ -0,0 +1,18 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Create a token that can create child tokens + vault_ci_token_create: + policies: + - test-policy + - token-creator + register: child_token_cmd + + - name: Create a token that can create orphan tokens + vault_ci_token_create: + policies: + - test-policy + - orphan-creator + register: orphan_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/tasks/lookup_vault_token_create_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/tasks/lookup_vault_token_create_test.yml new file mode 100644 index 000000000..de2c9cd7a --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/tasks/lookup_vault_token_create_test.yml @@ -0,0 +1,137 @@ +--- +- name: Var block + vars: + ansible_hashi_vault_auth_method: token + kwargs: + url: '{{ vault_test_server_http }}' + token: '{{ user_token }}' + ttl: 5m + block: + - name: Test with a child token creator + vars: + user_token: '{{ child_token_cmd.result.auth.client_token }}' + block: + - name: Create a child token + vars: + result: "{{ lookup('community.hashi_vault.vault_token_create', **kwargs) }}" + assert: + that: + - "'auth' in result" + - "'client_token' in result.auth" + - result.auth.client_token != user_token + - "'policies' in result.auth" + - 
"'test-policy' in result.auth.policies" + - "'default' in result.auth.policies" + - result.auth.orphan == False + + - name: (xfail) Create an orphan token with orphan=true + vars: + result: "{{ lookup('community.hashi_vault.vault_token_create', orphan=True, **kwargs) }}" + debug: + msg: "{{ result }} should have failed." + register: orphan_result + ignore_errors: true + + - assert: + that: + - orphan_result is failed + - orphan_result.msg is search('permission denied') + + - name: (xfail) Create an orphan token with no_parent=true + vars: + result: "{{ lookup('community.hashi_vault.vault_token_create', no_parent=True, **kwargs) }}" + debug: + msg: "{{ result }} should have failed." + register: no_parent_result + ignore_errors: true + + - assert: + that: + - no_parent_result is failed + - no_parent_result.msg is search('root or sudo privileges required to create orphan token') + + - name: Test with a orphan token creator + vars: + user_token: '{{ orphan_token_cmd.result.auth.client_token }}' + block: + - name: Create a child token + vars: + result: "{{ lookup('community.hashi_vault.vault_token_create', **kwargs) }}" + assert: + that: + - "'auth' in result" + - "'client_token' in result.auth" + - result.auth.client_token != user_token + - "'policies' in result.auth" + - "'test-policy' in result.auth.policies" + - "'default' in result.auth.policies" + - result.auth.orphan == False + + - name: Create an orphan token with orphan=true + vars: + result: "{{ lookup('community.hashi_vault.vault_token_create', orphan=True, **kwargs) }}" + assert: + that: + - "'auth' in result" + - "'client_token' in result.auth" + - result.auth.client_token != user_token + - "'policies' in result.auth" + - "'test-policy' in result.auth.policies" + - "'default' in result.auth.policies" + - result.auth.orphan == True + + - name: (xfail) Create an orphan token with no_parent=true + vars: + result: "{{ lookup('community.hashi_vault.vault_token_create', no_parent=True, **kwargs) }}" + debug: + msg: "{{ result }} should have failed." 
+ register: no_parent_result + ignore_errors: true + + - assert: + that: + - no_parent_result is failed + - no_parent_result.msg is search('root or sudo privileges required to create orphan token') + + - name: Test with a root token + vars: + user_token: '{{ vault_dev_root_token_id }}' + block: + - name: Create a child token + vars: + result: "{{ lookup('community.hashi_vault.vault_token_create', policies=['test-policy'], **kwargs) }}" + assert: + that: + - "'auth' in result" + - "'client_token' in result.auth" + - result.auth.client_token != user_token + - "'policies' in result.auth" + - "'test-policy' in result.auth.policies" + - "'default' in result.auth.policies" + - result.auth.orphan == False + + - name: Create an orphan token with orphan=true + vars: + result: "{{ lookup('community.hashi_vault.vault_token_create', orphan=True, policies=['test-policy'], **kwargs) }}" + assert: + that: + - "'auth' in result" + - "'client_token' in result.auth" + - result.auth.client_token != user_token + - "'policies' in result.auth" + - "'test-policy' in result.auth.policies" + - "'default' in result.auth.policies" + - result.auth.orphan == True + + - name: Create an orphan token with no_parent=true + vars: + result: "{{ lookup('community.hashi_vault.vault_token_create', no_parent=True, policies=['test-policy'], **kwargs) }}" + assert: + that: + - "'auth' in result" + - "'client_token' in result.auth" + - result.auth.client_token != user_token + - "'policies' in result.auth" + - "'test-policy' in result.auth.policies" + - "'default' in result.auth.policies" + - result.auth.orphan == True diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/tasks/main.yml new file mode 100644 index 000000000..614e8f2c7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_token_create/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: lookup_vault_token_create_setup.yml +- import_tasks: lookup_vault_token_create_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/aliases new file mode 100644 index 000000000..1bb8bf6d7 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/aliases @@ -0,0 +1 @@ +# empty diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/meta/main.yml new file mode 100644 index 000000000..290705e5e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - setup_vault_test_plugins diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/tasks/lookup_vault_write_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/tasks/lookup_vault_write_setup.yml new file mode 100644 index 000000000..53cdbe304 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/tasks/lookup_vault_write_setup.yml @@ -0,0 +1,12 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ 
vault_plugins_module_defaults_common }}' + block: + - name: 'Create a test non-root token' + vault_ci_token_create: + # we don't need test policy, but if we don't put something here + # it will inherit the root token's capabilities which is what + # we're trying to avoid. + policies: test-policy + register: user_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/tasks/lookup_vault_write_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/tasks/lookup_vault_write_test.yml new file mode 100644 index 000000000..33dc245f9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/tasks/lookup_vault_write_test.yml @@ -0,0 +1,95 @@ +--- +- name: Var block + vars: + ansible_hashi_vault_token_validate: true + user_token: '{{ user_token_cmd.result.auth.client_token }}' + kwargs: + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + data: '{{ test_data }}' + test_data: + a: 1 + b: two + block: + - name: Write data to the cubbyhole + vars: + result: "{{ lookup('community.hashi_vault.vault_write', 'cubbyhole/secret1', **kwargs) }}" + assert: + that: + - result == {} + + - name: Write data to multiple paths + vars: + paths: + - cubbyhole/secret2 + - cubbyhole/secret3 + set_fact: + result: "{{ lookup('community.hashi_vault.vault_write', *paths, **kwargs) }}" + + - assert: + that: + - result | type_debug == 'list' + - result | count == 2 + - result[0] == {} + - result[1] == {} + + - name: Check data written + vars: + paths: + - cubbyhole/secret1 + - cubbyhole/secret2 + - cubbyhole/secret3 + set_fact: + result: "{{ lookup('community.hashi_vault.vault_read', *paths, **kwargs) }}" + + - assert: + that: + - item | type_debug == 'dict' + - "'data' in item" + - item.data == test_data + fail_msg: 'Return value was not correct type or items do not match.' + loop: '{{ result }}' + + - name: Write data to an endpoint that returns data and test wrapping + set_fact: + result: "{{ lookup('community.hashi_vault.vault_write', 'sys/wrapping/wrap', wrap_ttl='5m', **kwargs) }}" + + - assert: + that: + - "'wrap_info' in result" + - result.wrap_info.ttl == 300 + + ### failure tests + + - name: Failure expected when erroneous credentials are used + vars: + write_wrong_cred: "{{ lookup('community.hashi_vault.vault_write', 'cubbyhole/secretX', token='wrong_token', url=kwargs.url) }}" + debug: + msg: 'Failure is expected ({{ write_wrong_cred }})' + register: test_wrong_cred + ignore_errors: true + + - assert: + that: + - test_wrong_cred is failed + - test_wrong_cred.msg is search('Invalid Vault Token') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: Failure expected when unauthorized path is written to + vars: + test_data: + type: file + options: + file_path: /dev/null + write_unauthorized: "{{ lookup('community.hashi_vault.vault_write', 'sys/audit/file', **kwargs) }}" + debug: + msg: 'Failure is expected ({{ write_unauthorized }})' + register: test_unauthorized + ignore_errors: true + + - assert: + that: + - test_unauthorized is failed + - test_unauthorized.msg is search('Permission Denied') + fail_msg: "Expected failure but got success or wrong failure message." 
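For comparison with the wrapping test above, a typical use of the vault_write lookup passes a data dictionary and, optionally, wrap_ttl to receive a wrapping token instead of the plain response. A sketch with hypothetical values (URL and token variable my_token):

- name: Wrap a small payload and keep only the wrapping token
  vars:
    wrapped: "{{ lookup('community.hashi_vault.vault_write', 'sys/wrapping/wrap',
                 data={'msg': 'hello'}, wrap_ttl='5m',
                 url='https://vault.example.com:8200', auth_method='token', token=my_token) }}"
  ansible.builtin.debug:
    # wrap_info.token can be handed to another party to unwrap within the TTL.
    msg: "wrapping token: {{ wrapped.wrap_info.token }}"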
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/tasks/main.yml new file mode 100644 index 000000000..6b9b82722 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/lookup_vault_write/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: lookup_vault_write_setup.yml +- import_tasks: lookup_vault_write_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/aliases new file mode 100644 index 000000000..7636a9a65 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/aliases @@ -0,0 +1 @@ +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/tasks/main.yml new file mode 100644 index 000000000..4bfceeddf --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: module_vault_kv1_get_setup.yml +- import_tasks: module_vault_kv1_get_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/tasks/module_vault_kv1_get_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/tasks/module_vault_kv1_get_setup.yml new file mode 100644 index 000000000..0907d3e01 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/tasks/module_vault_kv1_get_setup.yml @@ -0,0 +1,9 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: 'Create a test non-root token' + vault_ci_token_create: + policies: [test-policy, invalid-kv2] + register: user_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/tasks/module_vault_kv1_get_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/tasks/module_vault_kv1_get_test.yml new file mode 100644 index 000000000..e6bfb1c1f --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv1_get/tasks/module_vault_kv1_get_test.yml @@ -0,0 +1,105 @@ +--- +- name: Var block + vars: + user_token: '{{ user_token_cmd.result.auth.client_token }}' + module_defaults: + community.hashi_vault.vault_kv1_get: &defaults + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + token_validate: true + timeout: 5 + block: + - name: Test defualt path value + register: default_path + community.hashi_vault.vault_kv1_get: + path: '{{ vault_kv1_path }}/secret2' + 
ignore_errors: true + + - assert: + that: + - default_path is failed + - default_path.msg is search('Permission Denied to path') + + - module_defaults: + community.hashi_vault.vault_kv1_get: + <<: *defaults + engine_mount_point: '{{ vault_kv1_mount_point }}' + block: + - name: Check kv1 get + register: kv1_secret2 + community.hashi_vault.vault_kv1_get: + path: "{{ vault_kv1_path }}/secret2" + + - assert: + that: &assert_basics + - "'raw' in kv1_secret2" + - "'data' in kv1_secret2" + - "'metadata' in kv1_secret2" + - "'secret' in kv1_secret2" + - "'data' in kv1_secret2['raw']" + - kv1_secret2['data'] == kv1_secret2['raw']['data'] + - kv1_secret2['secret'] == kv1_secret2['data'] + - kv1_secret2['metadata'] | combine({'data':kv1_secret2['data']}) == kv1_secret2['raw'] + fail_msg: 'Return value did not contain expected fields.' + + - name: Check kv1 get (check mode) + register: kv1_secret2 + community.hashi_vault.vault_kv1_get: + path: "{{ vault_kv1_path }}/secret2" + check_mode: true + + - assert: + that: *assert_basics + fail_msg: 'Return value did not contain expected fields.' + + ### failure tests + + - name: Failure expected when erroneous credentials are used + register: test_wrong_cred + community.hashi_vault.vault_kv1_get: + path: "{{ vault_kv1_path }}/secret2" + token: wrong_token + ignore_errors: true + + - assert: + that: + - test_wrong_cred is failed + - test_wrong_cred.msg is search('Invalid Vault Token') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: Failure expected when unauthorized secret is read + register: test_unauthorized + community.hashi_vault.vault_kv1_get: + path: "{{ vault_kv1_path }}/secret3" + ignore_errors: true + + - assert: + that: + - test_unauthorized is failed + - test_unauthorized.msg is search('Permission Denied') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: Failure expected when inexistent secret is read + register: test_inexistent + community.hashi_vault.vault_kv1_get: + path: "{{ vault_kv1_path }}/non_existent_secret" + ignore_errors: true + + - assert: + that: + - test_inexistent is failed + - test_inexistent.msg is search("missing path") + fail_msg: "Expected failure but got success or wrong failure message." 
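Outside of the failure cases, the module form mirrors the lookup's return structure; a minimal sketch that registers the result and reads the flattened secret field (mount point, path, URL, and token variable my_token are hypothetical):

- name: Read a kv1 secret on the target host
  community.hashi_vault.vault_kv1_get:
    url: https://vault.example.com:8200
    auth_method: token
    token: "{{ my_token }}"
    engine_mount_point: kv1
    path: app/config
  register: config_secret

- name: Use one value from the secret
  ansible.builtin.debug:
    # 'secret' is the plain key/value dict; 'raw' retains the full API response.
    msg: "{{ config_secret.secret.password | default('unset') }}"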
+ + - name: Specific failure message expected when trying to read kv2 + register: test_kv_mismatch + community.hashi_vault.vault_kv1_get: + engine_mount_point: '{{ vault_kv2_mount_point }}' + path: "{{ vault_kv2_path }}/secret2" + ignore_errors: true + + - assert: + that: + - test_kv_mismatch is failed + - test_kv_mismatch.msg is search('If this is a KV version 2 path, use community.hashi_vault.vault_kv2_get') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/aliases new file mode 100644 index 000000000..7636a9a65 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/aliases @@ -0,0 +1 @@ +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/tasks/main.yml new file mode 100644 index 000000000..e222b14e6 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: module_vault_kv2_delete_setup.yml +- import_tasks: module_vault_kv2_delete_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/tasks/module_vault_kv2_delete_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/tasks/module_vault_kv2_delete_setup.yml new file mode 100644 index 000000000..4b058c0ce --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/tasks/module_vault_kv2_delete_setup.yml @@ -0,0 +1,28 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Create a test non-root token + vault_ci_token_create: + policies: [test-policy] + register: user_token_cmd + +- name: Configuration tasks + module_defaults: + vault_ci_kv2_destroy_all: '{{ vault_plugins_module_defaults_common }}' + vault_ci_kv_put: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Remove existing multi-version secret + vault_ci_kv2_destroy_all: + mount_point: '{{ vault_kv2_mount_point }}' + path: '{{ vault_kv2_versioned_path }}/secret6' + + - name: Set up a multi versioned secret for delete (v2) + vault_ci_kv_put: + version: 2 + mount_point: '{{ vault_kv2_mount_point }}' + path: '{{ vault_kv2_versioned_path }}/secret6' + secret: + v: value{{ item }} + loop: ["1", "2", "3", "4", "5"] diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/tasks/module_vault_kv2_delete_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/tasks/module_vault_kv2_delete_test.yml new file mode 100644 index 000000000..57e770526 --- /dev/null +++ 
b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_delete/tasks/module_vault_kv2_delete_test.yml @@ -0,0 +1,232 @@ +--- +- name: Var block + vars: + user_token: '{{ user_token_cmd.result.auth.client_token }}' + regex_secret_version_is_deleted: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T.*" + regex_secret_version_not_deleted: "^$" + + module_defaults: + community.hashi_vault.vault_kv2_delete: &defaults + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + token_validate: true + timeout: 5 + vault_ci_kv2_metadata_read: '{{ vault_plugins_module_defaults_common }}' + + block: + - name: Test default path value + register: default_path + community.hashi_vault.vault_kv2_delete: + path: '{{ vault_kv2_path }}/secret2' + ignore_errors: true + + - assert: + that: + - default_path is failed + - default_path.msg is search('Permission Denied to path') + + - module_defaults: + community.hashi_vault.vault_kv2_delete: + <<: *defaults + engine_mount_point: '{{ vault_kv2_mount_point }}' + block: + + - name: Check kv2 existing versions + register: kv2_result + vault_ci_kv2_metadata_read: + path: "{{ vault_kv2_versioned_path }}/secret6" + mount_point: '{{ vault_kv2_mount_point }}' + + - assert: + that: + - "'result' in kv2_result" + - "'data' in kv2_result['result']" + - "'versions' in kv2_result['result']['data']" + - "kv2_result['result']['data']['versions']['1']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['2']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['3']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['4']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['5']['deletion_time'] is search(regex_secret_version_not_deleted)" + fail_msg: 'Test Seed value did not contain expected data.' + + + - name: Try kv2 delete latest version in check mode + register: kv2_result + community.hashi_vault.vault_kv2_delete: + path: "{{ vault_kv2_versioned_path }}/secret6" + check_mode: true + + - assert: + that: + - kv2_result is changed + - kv2_result.data == {} + + - name: Read resultant secret versions + register: kv2_result + vault_ci_kv2_metadata_read: + path: "{{ vault_kv2_versioned_path }}/secret6" + mount_point: '{{ vault_kv2_mount_point }}' + + - assert: + that: + - "'result' in kv2_result" + - "'data' in kv2_result['result']" + - "'versions' in kv2_result['result']['data']" + - "kv2_result['result']['data']['versions']['1']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['2']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['3']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['4']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['5']['deletion_time'] is search(regex_secret_version_not_deleted)" + fail_msg: 'Secret version was deleted while in check mode.' 
+ + + - name: Try kv2 delete specific version in check mode + register: kv2_result + community.hashi_vault.vault_kv2_delete: + path: "{{ vault_kv2_versioned_path }}/secret6" + versions: [1, 3] + check_mode: true + + - name: Read resultant secret versions + register: kv2_result + vault_ci_kv2_metadata_read: + path: "{{ vault_kv2_versioned_path }}/secret6" + mount_point: '{{ vault_kv2_mount_point }}' + + - assert: + that: + - "'result' in kv2_result" + - "'data' in kv2_result['result']" + - "'versions' in kv2_result['result']['data']" + - "kv2_result['result']['data']['versions']['1']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['2']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['3']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['4']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['5']['deletion_time'] is search(regex_secret_version_not_deleted)" + fail_msg: 'Secret version was deleted while in check mode.' + + + - name: Try kv2 delete version 1 and 3 + register: kv2_result + community.hashi_vault.vault_kv2_delete: + path: "{{ vault_kv2_versioned_path }}/secret6" + versions: + - 1 + - 3 + + - name: Read resultant secret versions + register: kv2_result + vault_ci_kv2_metadata_read: + path: "{{ vault_kv2_versioned_path }}/secret6" + mount_point: '{{ vault_kv2_mount_point }}' + + - assert: + that: + - "'result' in kv2_result" + - "'data' in kv2_result['result']" + - "'versions' in kv2_result['result']['data']" + - "kv2_result['result']['data']['versions']['1']['deletion_time'] is search(regex_secret_version_is_deleted)" + - "kv2_result['result']['data']['versions']['2']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['3']['deletion_time'] is search(regex_secret_version_is_deleted)" + - "kv2_result['result']['data']['versions']['4']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['5']['deletion_time'] is search(regex_secret_version_not_deleted)" + fail_msg: 'Result value did not contain expected data.' + + + - name: Try kv2 delete latest version + register: kv2_result + community.hashi_vault.vault_kv2_delete: + path: "{{ vault_kv2_versioned_path }}/secret6" + + - name: Read resultant secret versions + register: kv2_result + vault_ci_kv2_metadata_read: + path: "{{ vault_kv2_versioned_path }}/secret6" + mount_point: '{{ vault_kv2_mount_point }}' + + - assert: + that: + - "'result' in kv2_result" + - "'data' in kv2_result['result']" + - "'versions' in kv2_result['result']['data']" + - "kv2_result['result']['data']['versions']['1']['deletion_time'] is search(regex_secret_version_is_deleted)" + - "kv2_result['result']['data']['versions']['2']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['3']['deletion_time'] is search(regex_secret_version_is_deleted)" + - "kv2_result['result']['data']['versions']['4']['deletion_time'] is search(regex_secret_version_not_deleted)" + - "kv2_result['result']['data']['versions']['5']['deletion_time'] is search(regex_secret_version_is_deleted)" + fail_msg: 'Result value did not contain expected data.' 
+ + + - name: Success expected when authorized delete on non-existent path (latest version) + register: test_nonexistent + community.hashi_vault.vault_kv2_delete: + path: "{{ vault_kv2_versioned_path }}/non_existent_secret" + + + - name: Success expected when authorized delete on non-existent path (specific version) + register: test_nonexistent + community.hashi_vault.vault_kv2_delete: + path: "{{ vault_kv2_versioned_path }}/non_existent_secret" + versions: + - 1 + + + ### failure tests + + - name: Failure expected when erroneous credentials are used (latest version) + register: test_wrong_cred + community.hashi_vault.vault_kv2_delete: + path: "{{ vault_kv2_versioned_path }}/secret6" + token: wrong_token + ignore_errors: true + + - assert: + that: + - test_wrong_cred is failed + - test_wrong_cred.msg is search('Invalid Vault Token') + fail_msg: "Expected failure but got success or wrong failure message." + + + - name: Failure expected when erroneous credentials are used (specific version) + register: test_wrong_cred + community.hashi_vault.vault_kv2_delete: + path: "{{ vault_kv2_versioned_path }}/secret6" + token: wrong_token + versions: + - 1 + ignore_errors: true + + - assert: + that: + - test_wrong_cred is failed + - test_wrong_cred.msg is search('Invalid Vault Token') + fail_msg: "Expected failure but got success or wrong failure message." + + + - name: Failure expected when unauthorized secret is deleted (latest version) + register: test_unauthorized + community.hashi_vault.vault_kv2_delete: + path: "{{ vault_kv2_path }}/secret3" + ignore_errors: true + + - assert: + that: + - test_unauthorized is failed + - test_unauthorized.msg is search('Permission Denied') + fail_msg: "Expected failure but got success or wrong failure message." + + + - name: Failure expected when unauthorized secret is deleted (specific version) + register: test_unauthorized + community.hashi_vault.vault_kv2_delete: + path: "{{ vault_kv2_path }}/secret3" + versions: + - 1 + ignore_errors: true + + - assert: + that: + - test_unauthorized is failed + - test_unauthorized.msg is search('Permission Denied') + fail_msg: "Expected failure but got success or wrong failure message."
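Note: the delete assertions above distinguish live from deleted versions purely through the deletion_time field returned by the KV v2 metadata read; an empty string means the version is still live, while a deleted version carries an RFC 3339 timestamp. A minimal sketch of the metadata shape those regexes run against (timestamps below are illustrative, not taken from a test run):

data:
  versions:
    "1":
      created_time: "2024-01-01T00:00:00.000000Z"
      deletion_time: "2024-01-02T00:00:00.000000Z"   # matches regex_secret_version_is_deleted
      destroyed: false
    "2":
      created_time: "2024-01-01T00:05:00.000000Z"
      deletion_time: ""                              # matches regex_secret_version_not_deleted
      destroyed: false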
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/aliases new file mode 100644 index 000000000..7636a9a65 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/aliases @@ -0,0 +1 @@ +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/tasks/main.yml new file mode 100644 index 000000000..e51766353 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: module_vault_kv2_get_setup.yml +- import_tasks: module_vault_kv2_get_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/tasks/module_vault_kv2_get_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/tasks/module_vault_kv2_get_setup.yml new file mode 100644 index 000000000..012b300f1 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/tasks/module_vault_kv2_get_setup.yml @@ -0,0 +1,9 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Create a test non-root token + vault_ci_token_create: + policies: [test-policy] + register: user_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/tasks/module_vault_kv2_get_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/tasks/module_vault_kv2_get_test.yml new file mode 100644 index 000000000..8cb4edf0f --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_get/tasks/module_vault_kv2_get_test.yml @@ -0,0 +1,156 @@ +--- +- name: Var block + vars: + user_token: '{{ user_token_cmd.result.auth.client_token }}' + module_defaults: + community.hashi_vault.vault_kv2_get: &defaults + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + token_validate: true + timeout: 5 + block: + - name: Test default path value + register: default_path + community.hashi_vault.vault_kv2_get: + path: '{{ vault_kv2_path }}/secret2' + ignore_errors: true + + - assert: + that: + - default_path is failed + - default_path.msg is search('Permission Denied to path') + + - module_defaults: + community.hashi_vault.vault_kv2_get: + <<: *defaults + engine_mount_point: '{{ vault_kv2_mount_point }}' + block: + - name: Check kv2 get + register: kv2_result + community.hashi_vault.vault_kv2_get: + path: "{{ vault_kv2_path }}/secret2" + + - assert: + that: &assert_basics + - "'raw' in kv2_result" + - "'data' in kv2_result" + - "'metadata' in kv2_result" + -
"'secret' in kv2_result" + - "'data' in kv2_result['raw']" + - kv2_result['data'] == kv2_result['raw']['data'] + - kv2_result['secret'] == kv2_result['data']['data'] + - kv2_result['metadata'] == kv2_result['data']['metadata'] + fail_msg: 'Return value did not contain expected fields.' + + - name: Check kv2 get (check mode) + register: kv2_result + community.hashi_vault.vault_kv2_get: + path: "{{ vault_kv2_path }}/secret2" + check_mode: true + + - assert: + that: *assert_basics + + - name: Check kv2 versioned (latest) + register: kv2_result + community.hashi_vault.vault_kv2_get: + path: "{{ vault_kv2_versioned_path }}/twover" + + - assert: + that: *assert_basics + + - name: Check version-specific expected values (latest) + vars: &expected_ver2 + expected: + version: 2 + a: two + v: 2 + assert: + that: &assert_expected + - kv2_result.metadata.version == expected.version + - kv2_result.secret.a == expected.a + - kv2_result.secret.v == expected.v + + - name: Check kv2 versioned (2) + register: kv2_result + community.hashi_vault.vault_kv2_get: + path: "{{ vault_kv2_versioned_path }}/twover" + version: 2 + + - assert: + that: *assert_basics + + - name: Check version-specific expected values (2) + vars: *expected_ver2 + assert: + that: *assert_expected + + - name: Check kv2 versioned (1) + register: kv2_result + community.hashi_vault.vault_kv2_get: + path: "{{ vault_kv2_versioned_path }}/twover" + version: 1 + + - assert: + that: *assert_basics + + - name: Check version-specific expected values (1) + vars: + expected: + version: 1 + a: one + v: 1 + assert: + that: *assert_expected + + ### failure tests + + - name: Failure expected when erroneous credentials are used + register: test_wrong_cred + community.hashi_vault.vault_kv2_get: + path: "{{ vault_kv2_path }}/secret2" + token: wrong_token + ignore_errors: true + + - assert: + that: + - test_wrong_cred is failed + - test_wrong_cred.msg is search('Invalid Vault Token') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: Failure expected when unauthorized secret is read + register: test_unauthorized + community.hashi_vault.vault_kv2_get: + path: "{{ vault_kv2_path }}/secret3" + ignore_errors: true + + - assert: + that: + - test_unauthorized is failed + - test_unauthorized.msg is search('Permission Denied') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: Failure expected when inexistent secret is read + register: test_inexistent + community.hashi_vault.vault_kv2_get: + path: "{{ vault_kv2_path }}/non_existent_secret" + ignore_errors: true + + - assert: + that: + - test_inexistent is failed + - test_inexistent.msg is search("missing path") + fail_msg: "Expected failure but got success or wrong failure message." + + - name: Failure expected when reading invalid version + register: test_no_ver + community.hashi_vault.vault_kv2_get: + path: '{{ vault_kv2_versioned_path }}/twover' + version: 3 + ignore_errors: true + + - assert: + that: + - test_no_ver is failed + - test_no_ver.msg is search("Invalid or missing path \\['[^']+'\\] with secret version '3'. 
Check the path or secret version") diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/aliases new file mode 100644 index 000000000..7636a9a65 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/aliases @@ -0,0 +1 @@ +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/tasks/main.yml new file mode 100644 index 000000000..382997fa0 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: setup.yml +- import_tasks: test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/tasks/setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/tasks/setup.yml new file mode 100644 index 000000000..882b9d195 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/tasks/setup.yml @@ -0,0 +1,29 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: "{{ vault_plugins_module_defaults_common }}" + vault_ci_write: "{{ vault_plugins_module_defaults_common }}" + vault_ci_kv2_destroy_all: "{{ vault_plugins_module_defaults_common }}" + block: + - name: Ensure test secrets are deleted + vault_ci_kv2_destroy_all: + path: "{{ item.path }}" + mount_point: "{{ item.mount | default(vault_kv2_mount_point) }}" + loop: + - path: "{{ vault_kv2_path }}/write1" + - path: "{{ vault_kv2_path }}/readonly" + - path: "{{ vault_kv2_path }}/writeonly" + - path: "write1" + mount: "{{ cas_required_vault_kv2_mount_point }}" + + - name: Create a test non-root token + vault_ci_token_create: + policies: [test-policy] + register: user_token_cmd + + - name: Write to readonly + vault_ci_write: + path: "{{ vault_kv2_api_path }}/readonly" + data: + data: + foo: bar diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/tasks/test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/tasks/test.yml new file mode 100644 index 000000000..55d6e7a77 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/tasks/test.yml @@ -0,0 +1,329 @@ +--- +- name: Var block + vars: + user_token: "{{ user_token_cmd.result.auth.client_token }}" + module_defaults: + community.hashi_vault.vault_kv2_write: + url: "{{ vault_test_server_http }}" + auth_method: token + token: "{{ user_token }}" + token_validate: true + timeout: 5 + engine_mount_point: "{{ vault_kv2_mount_point }}" + vault_ci_read: "{{ vault_plugins_module_defaults_common }}" + block: + - name: Incorrect token + 
ignore_errors: true + community.hashi_vault.vault_kv2_write: + path: "{{ vault_kv2_path }}/write1" + token: notavalidtoken + data: + foo: bar + register: result + + - assert: + that: + - result is failed + + - name: Test create (check=true) + check_mode: true + community.hashi_vault.vault_kv2_write: &write + path: "{{ vault_kv2_path }}/write1" + data: "{{ write_data0 }}" + read_before_write: true + register: kv2_check + + - assert: + that: + - kv2_check is changed + + - name: Check that the secret doesn't exist + vault_ci_read: &read1 + path: "{{ vault_kv2_api_path }}/write1" + register: ci_read + + - assert: + that: + - ci_read.result == None + + - name: Test create (check=false) + community.hashi_vault.vault_kv2_write: *write + register: result + + - name: Read the secret + vault_ci_read: *read1 + register: ci_read + + - assert: + that: + - ci_read.result.data.data == write_data0 + - result is changed + - "'raw' in result" + + - name: Test create (idempotency) + community.hashi_vault.vault_kv2_write: *write + register: result + + - name: Read the secret + vault_ci_read: *read1 + register: ci_read + + - assert: + that: + - ci_read.result.data.data == write_data0 + - result is not changed + - ci_read.result.data.metadata.version == 1 + + - name: Test write/overwrite (check=true) + check_mode: true + community.hashi_vault.vault_kv2_write: &overwrite + path: "{{ vault_kv2_path }}/write1" + data: "{{ write_data1 }}" + read_before_write: true + register: kv2_check + + - name: Read the secret + vault_ci_read: *read1 + register: ci_read + + - assert: + that: + - kv2_check is changed + - ci_read.result.data.data == write_data0 + - ci_read.result.data.metadata.version == 1 + + - name: Test write/overwrite (check=false) + community.hashi_vault.vault_kv2_write: *overwrite + register: result + + - name: Read the secret + vault_ci_read: *read1 + register: ci_read + + - assert: + that: + - ci_read.result.data.data == write_data1 + - ci_read.result.data.metadata.version == 2 + - result is changed + - "'raw' in result" + + - name: Test write/overwrite (idempotency) + community.hashi_vault.vault_kv2_write: *overwrite + register: result + + - name: Read the secret + vault_ci_read: *read1 + register: ci_read + + - assert: + that: + - ci_read.result.data.data == write_data1 + - result is not changed + - ci_read.result.data.metadata.version == 2 + + - name: Test write secret forbidden (check=true, read=true) + ignore_errors: true + community.hashi_vault.vault_kv2_write: &write_deny_read + path: "{{ vault_kv2_path }}/deny" + data: + foo: bar + read_before_write: true + register: result + + - assert: + that: + - result is failed + - result.msg is search("Permission denied reading") + + - name: Test write secret forbidden (check=false, read=true) + ignore_errors: true + community.hashi_vault.vault_kv2_write: *write_deny_read + register: result + + - assert: + that: + - result is failed + - result.msg is search("Permission denied reading") + + - name: Test write secret forbidden (check=true, read=false) + check_mode: true + community.hashi_vault.vault_kv2_write: &write_deny + path: "{{ vault_kv2_path }}/deny" + data: + foo: bar + register: result + + - assert: + that: + - result is changed + + - name: Test write secret forbidden (check=false, read=false) + ignore_errors: true + community.hashi_vault.vault_kv2_write: *write_deny + register: result + + - assert: + that: + - result is failed + - result.msg is search("Permission denied writing") + + - name: Test write cas (check=true) + check_mode: true + 
community.hashi_vault.vault_kv2_write: &write_cas + cas: 2 + path: "{{ vault_kv2_path }}/write1" + data: "{{ write_data2 }}" + read_before_write: true + register: result + + - name: Read the secret + vault_ci_read: *read1 + register: ci_read + + - assert: + that: + - result is changed + - ci_read.result.data.data == write_data1 + - ci_read.result.data.metadata.version == 2 + + - name: Test write cas (check=false) + community.hashi_vault.vault_kv2_write: *write_cas + register: result + + - name: Read the secret + vault_ci_read: *read1 + register: ci_read + + - assert: + that: + - result is changed + - ci_read.result.data.data == write_data2 + - ci_read.result.data.metadata.version == 3 + + - name: Test write cas (idempotency) + community.hashi_vault.vault_kv2_write: *write_cas + register: result + + - name: Read the secret + vault_ci_read: *read1 + register: ci_read + + - assert: + that: + - result is not changed + - ci_read.result.data.data == write_data2 + - ci_read.result.data.metadata.version == 3 + + - name: Test write cas wrong value + ignore_errors: true + community.hashi_vault.vault_kv2_write: + path: "{{ vault_kv2_path }}/write1" + cas: 1 + data: + new: data + read_before_write: true + register: result + + - assert: + that: + - result is failed + - result.msg is search("InvalidRequest") + + - name: Test forbidden write + ignore_errors: true + community.hashi_vault.vault_kv2_write: + path: "{{ vault_kv2_path }}/readonly" + data: + key1: val1 + register: result + + - assert: + that: + - result is failed + - result.msg is search("Permission denied writing to") + + - name: Test create on cas_required=true mount + community.hashi_vault.vault_kv2_write: + engine_mount_point: "{{ cas_required_vault_kv2_mount_point }}" + path: write1 + data: "{{ write_data3 }}" + cas: 0 + read_before_write: true + register: result + + - name: Read the secret + vault_ci_read: + path: "{{ cas_required_vault_kv2_mount_point }}/data/write1" + register: ci_read + + - assert: + that: + - result is changed + - ci_read.result.data.data == write_data3 + - ci_read.result.data.metadata.version == 1 + + - name: Test write on write only secret (read=true,check=true) + ignore_errors: true + check_mode: true + community.hashi_vault.vault_kv2_write: + path: "{{ vault_kv2_path }}/writeonly" + read_before_write: true + data: + foo: bar + register: result + + - assert: + that: + - result is failed + + - name: Test write on write only secret (read=true,check=false) + ignore_errors: true + community.hashi_vault.vault_kv2_write: + path: "{{ vault_kv2_path }}/writeonly" + read_before_write: true + data: + foo: bar + register: result + + - assert: + that: + - result is failed + + - name: Test write on write only secret (read=false,check=true) + check_mode: true + community.hashi_vault.vault_kv2_write: + path: "{{ vault_kv2_path }}/writeonly" + data: + foo: bar + register: result + + - assert: + that: + - result is changed + + - name: Read the secret + vault_ci_read: + path: "{{ vault_kv2_api_path }}/writeonly" + register: ci_read + + - assert: + that: + - result is changed + - ci_read.result == None + + - name: Test write on write only secret (read=false,check=false) + community.hashi_vault.vault_kv2_write: + path: "{{ vault_kv2_path }}/writeonly" + data: "{{ write_data3 }}" + register: result + + - name: Read the secret + vault_ci_read: + path: "{{ vault_kv2_api_path }}/writeonly" + register: ci_read + + - assert: + that: + - result is changed + - ci_read.result.data.metadata.version == 1 + - ci_read.result.data.data == 
write_data3 diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/vars/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/vars/main.yml new file mode 100644 index 000000000..b56f3df82 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_kv2_write/vars/main.yml @@ -0,0 +1,16 @@ +--- +write_data0: + foo: bar + key1: changeme + +write_data1: + foo: bar + key2: val2 + +write_data2: + foo: bar + key2: value2 + key3: value3 + +write_data3: + foo: bar diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/aliases new file mode 100644 index 000000000..7636a9a65 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/aliases @@ -0,0 +1 @@ +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/tasks/main.yml new file mode 100644 index 000000000..cd7bd5d5d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: module_vault_list_setup.yml +- import_tasks: module_vault_list_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/tasks/module_vault_list_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/tasks/module_vault_list_setup.yml new file mode 100644 index 000000000..193d6fa5e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/tasks/module_vault_list_setup.yml @@ -0,0 +1,9 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: 'Create a test non-root token' + vault_ci_token_create: + policies: test-policy + register: user_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/tasks/module_vault_list_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/tasks/module_vault_list_test.yml new file mode 100644 index 000000000..64f40d845 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_list/tasks/module_vault_list_test.yml @@ -0,0 +1,100 @@ +--- +- name: Var block + vars: + user_token: '{{ user_token_cmd.result.auth.client_token }}' + module_defaults: + community.hashi_vault.vault_list: + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + token_validate: true + timeout: 5 + block: + - name: 'Check kv2 secret list' + register: kv2_path + community.hashi_vault.vault_list: + path: "{{ vault_kv2_api_list_path }}" + + - assert: 
+ that: + - "'data' in kv2_path" + - "'data' in kv2_path['data']" + - "'keys' in kv2_path['data']['data']" + fail_msg: 'Return value did not contain expected fields.' + + - name: 'Check kv2 mount point list' + register: kv2_mount_point + community.hashi_vault.vault_list: + path: "{{ vault_kv2_api_list_mount_point }}" + + - assert: + that: + - "'data' in kv2_mount_point" + - "'data' in kv2_mount_point['data']" + - "'keys' in kv2_mount_point['data']['data']" + fail_msg: 'Return value did not contain expected fields.' + + ### failure tests + + - name: 'Failure expected when erroneous credentials are used' + register: test_wrong_cred + community.hashi_vault.vault_list: + path: "{{ vault_kv2_api_list_path }}" + token: wrong_token + ignore_errors: true + + - assert: + that: + - test_wrong_cred is failed + - test_wrong_cred.msg is search('Invalid Vault Token') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: 'Failure expected when unauthorized path is listed' + register: test_unauthorized + community.hashi_vault.vault_list: + path: "{{ unauthorized_vault_kv2_mount_point }}" + ignore_errors: true + + - assert: + that: + - test_unauthorized is failed + - test_unauthorized.msg is search('Permission Denied') + fail_msg: "Expected failure but got success or wrong failure message." + + # When an inexistent mount point is listed, the API returns a 403 error, not 404. + - name: 'Failure expected when inexistent mount point is listed' + register: test_inexistent_mount_point + community.hashi_vault.vault_list: + path: "{{ vault_kv2_api_list_inexistent_mount_point }}" + ignore_errors: true + + - assert: + that: + - test_inexistent_mount_point is failed + - test_inexistent_mount_point.msg is search("Permission Denied") + fail_msg: "Expected failure but got success or wrong failure message." + + - name: 'Failure expected when inexistent path is listed' + register: test_inexistent + community.hashi_vault.vault_list: + path: "{{ vault_kv2_api_list_inexistent_path }}" + ignore_errors: true + + - assert: + that: + - test_inexistent is failed + - test_inexistent.msg is search("doesn't seem to exist") + fail_msg: "Expected failure but got success or wrong failure message." + + # If an inexistent path is included in a policy statement that denies access, the list API returns a 403 error. + - name: 'Failure expected when inexistent path is listed but is explicitly mentioned in a policy statement' + register: test_inexistent_unauthorized + community.hashi_vault.vault_list: + path: "{{ vault_kv2_api_list_inexistent_unauthorized_path }}" + ignore_errors: true + + - assert: + that: + - test_inexistent_unauthorized is failed + - test_inexistent_unauthorized.msg is search("Permission Denied") + fail_msg: "Expected failure but got success or wrong failure message." 
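Note: as the comments above point out, Vault answers a list on a nonexistent mount point, or on a denied-but-missing path, with 403 rather than 404, so a caller cannot tell "forbidden" apart from "absent" from the error alone. A minimal defensive sketch (not part of the test suite), reusing the same connection variables as the tests above:

- name: List a path that may be missing or forbidden
  community.hashi_vault.vault_list:
    url: "{{ vault_test_server_http }}"
    auth_method: token
    token: "{{ user_token }}"
    path: "{{ vault_kv2_api_list_path }}"
  register: listing
  ignore_errors: true

- name: Fall back to an empty key list when the listing failed for any reason
  ansible.builtin.set_fact:
    listed_keys: "{{ listing.data.data['keys'] | default([]) }}"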
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/aliases new file mode 100644 index 000000000..7636a9a65 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/aliases @@ -0,0 +1 @@ +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/tasks/main.yml new file mode 100644 index 000000000..a4978979d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: module_vault_login_setup.yml +- import_tasks: module_vault_login_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/tasks/module_vault_login_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/tasks/module_vault_login_setup.yml new file mode 100644 index 000000000..42ec2b1ca --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/tasks/module_vault_login_setup.yml @@ -0,0 +1,15 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Create a test non-root token + vault_ci_token_create: + policies: test-policy + register: user_token_cmd + + - name: Create a test non-root token with no default policy + vault_ci_token_create: + policies: test-policy + no_default_policy: true + register: user_token_no_default_policy_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/tasks/module_vault_login_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/tasks/module_vault_login_test.yml new file mode 100644 index 000000000..be8e3acff --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_login/tasks/module_vault_login_test.yml @@ -0,0 +1,75 @@ +--- +- name: Var block + vars: + user_token: '{{ user_token_cmd.result.auth.client_token }}' + module_defaults: + community.hashi_vault.vault_login: + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + timeout: 5 + block: + - name: Login with token + register: result + community.hashi_vault.vault_login: + + - assert: + that: + - "'login' in result" + - "'auth' in result.login" + - "'client_token' in result.login.auth" + - result.login.auth.client_token == user_token + - "'policies' in result.login.auth" + - "'test-policy' in result.login.auth.policies" + - "'default' in result.login.auth.policies" + + - name: Login with token without validation + register: result + community.hashi_vault.vault_login: + token_validate: false + + - assert: + that: + - "'login' in result" + 
- "'auth' in result.login" + - "'client_token' in result.login.auth" + - result.login.auth.client_token == user_token + - "'policies' not in result.login.auth" + + - name: Try a login in check mode + register: result + community.hashi_vault.vault_login: + check_mode: yes + + - assert: + that: + - result is not changed # because of token auth not because of check mode + - "'login' in result" + - "'auth' in result.login" + - "'client_token' in result.login.auth" + - result.login.auth.client_token == None # no token returned in check mode, just an empty structure + + + ### failure tests + + - name: none auth method is not supported + register: none_result + community.hashi_vault.vault_login: + auth_method: none + ignore_errors: true + + - assert: + that: + - none_result is failed + - none_result.msg is search("The 'none' auth method is not valid for this module.") + + - name: Try to lookup-self without permission + register: lookup_result + community.hashi_vault.vault_login: + token: '{{ user_token_no_default_policy_cmd.result.auth.client_token }}' + ignore_errors: true + + - assert: + that: + - lookup_result is failed + - lookup_result.msg is search('Invalid Vault Token Specified') diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/aliases new file mode 100644 index 000000000..7636a9a65 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/aliases @@ -0,0 +1 @@ +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/meta/main.yml new file mode 100644 index 000000000..2e9e47004 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_configure + - setup_vault_configure_engine_pki diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/tasks/main.yml new file mode 100644 index 000000000..beb61006c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: module_vault_pki_generate_certificate_setup.yml +- import_tasks: module_vault_pki_generate_certificate_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/tasks/module_vault_pki_generate_certificate_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/tasks/module_vault_pki_generate_certificate_setup.yml new file mode 100644 index 000000000..793470a09 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/tasks/module_vault_pki_generate_certificate_setup.yml @@ -0,0 +1,9 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Create a test non-root token + 
vault_ci_token_create: + policies: test-pki-policy + register: user_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/tasks/module_vault_pki_generate_certificate_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/tasks/module_vault_pki_generate_certificate_test.yml new file mode 100644 index 000000000..5543a7c9f --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_pki_generate_certificate/tasks/module_vault_pki_generate_certificate_test.yml @@ -0,0 +1,41 @@ +--- +- name: Var block + vars: + user_token: '{{ user_token_cmd.result.auth.client_token }}' + module_defaults: + community.hashi_vault.vault_pki_generate_certificate: + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + timeout: 5 + block: + - name: Generate a throwaway certificate + register: cert_data + community.hashi_vault.vault_pki_generate_certificate: + role_name: test.example.org + common_name: throwaway.test.example.org + alt_names: + - throwaway2.test.example.org + - throwaway3.test.example.org + + - assert: + that: + - cert_data is changed + - "'data' in cert_data" + - "'data' in cert_data['data']" + - "'certificate' in cert_data['data']['data']" + fail_msg: Return value did not contain expected fields. + + - name: Generate certificate (check mode) + register: result + community.hashi_vault.vault_pki_generate_certificate: + role_name: test.example.org + common_name: throwaway.test.example.org + check_mode: true + + - assert: + that: + - result is changed + - "'data' in result" + - result.data == {} + fail_msg: "Unexpected result from check mode: {{ result }}" diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/aliases new file mode 100644 index 000000000..7636a9a65 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/aliases @@ -0,0 +1 @@ +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/tasks/main.yml new file mode 100644 index 000000000..c9db6b92b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: module_vault_read_setup.yml +- import_tasks: module_vault_read_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/tasks/module_vault_read_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/tasks/module_vault_read_setup.yml new file mode 100644 index 000000000..193d6fa5e --- /dev/null +++ 
b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/tasks/module_vault_read_setup.yml @@ -0,0 +1,9 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: 'Create a test non-root token' + vault_ci_token_create: + policies: test-policy + register: user_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/tasks/module_vault_read_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/tasks/module_vault_read_test.yml new file mode 100644 index 000000000..55c27d435 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_read/tasks/module_vault_read_test.yml @@ -0,0 +1,63 @@ +--- +- name: Var block + vars: + user_token: '{{ user_token_cmd.result.auth.client_token }}' + module_defaults: + community.hashi_vault.vault_read: + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + token_validate: true + timeout: 5 + block: + - name: 'Check kv2 secret read' + register: kv2_secret2 + community.hashi_vault.vault_read: + path: "{{ vault_kv2_api_path }}/secret2" + + - assert: + that: + - "'data' in kv2_secret2" + - "'data' in kv2_secret2['data']" + - "'data' in kv2_secret2['data']['data']" + - "'metadata' in kv2_secret2['data']['data']" + fail_msg: 'Return value did not contain expected fields.' + + ### failure tests + + - name: 'Failure expected when erroneous credentials are used' + register: test_wrong_cred + community.hashi_vault.vault_read: + path: "{{ vault_kv2_api_path }}/secret2" + token: wrong_token + ignore_errors: true + + - assert: + that: + - test_wrong_cred is failed + - test_wrong_cred.msg is search('Invalid Vault Token') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: 'Failure expected when unauthorized secret is read' + register: test_unauthorized + community.hashi_vault.vault_read: + path: "{{ vault_kv2_api_path }}/secret3" + ignore_errors: true + + - assert: + that: + - test_unauthorized is failed + - test_unauthorized.msg is search('Permission Denied') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: 'Failure expected when inexistent secret is read' + register: test_inexistent + community.hashi_vault.vault_read: + path: "{{ vault_kv2_api_path }}/non_existent_secret" + ignore_errors: true + + - assert: + that: + - test_inexistent is failed + - test_inexistent.msg is search("doesn't seem to exist") + fail_msg: "Expected failure but got success or wrong failure message." 
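Note: the asserts above show the extra nesting you get when reading a KV v2 secret through the generic module: the secret values sit under data.data.data and the version metadata under data.data.metadata, whereas vault_kv2_get (tested earlier) surfaces them directly as secret and metadata. A side-by-side sketch, assuming connection options come from module_defaults as in the tests:

- name: Generic read of a KV v2 secret (goes through the data/ API path)
  community.hashi_vault.vault_read:
    path: "{{ vault_kv2_api_path }}/secret2"
  register: raw_read
  # secret values:   raw_read.data.data.data
  # version details: raw_read.data.data.metadata

- name: Dedicated KV v2 read of the same secret
  community.hashi_vault.vault_kv2_get:
    path: "{{ vault_kv2_path }}/secret2"
  register: kv2_read
  # secret values:   kv2_read.secret
  # version details: kv2_read.metadata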
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/aliases new file mode 100644 index 000000000..7636a9a65 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/aliases @@ -0,0 +1 @@ +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/meta/main.yml new file mode 100644 index 000000000..d3acb69e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - setup_vault_test_plugins + - setup_vault_configure diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/tasks/main.yml new file mode 100644 index 000000000..6cc2b7cea --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: module_vault_token_create_setup.yml +- import_tasks: module_vault_token_create_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/tasks/module_vault_token_create_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/tasks/module_vault_token_create_setup.yml new file mode 100644 index 000000000..b75bd313c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/tasks/module_vault_token_create_setup.yml @@ -0,0 +1,18 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Create a token that can create child tokens + vault_ci_token_create: + policies: + - test-policy + - token-creator + register: child_token_cmd + + - name: Create a token that can create orphan tokens + vault_ci_token_create: + policies: + - test-policy + - orphan-creator + register: orphan_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/tasks/module_vault_token_create_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/tasks/module_vault_token_create_test.yml new file mode 100644 index 000000000..67e9981ea --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_token_create/tasks/module_vault_token_create_test.yml @@ -0,0 +1,178 @@ +--- +- name: Defaults block + module_defaults: + community.hashi_vault.vault_token_create: + url: '{{ vault_test_server_http }}' + auth_method: token + token: '{{ user_token }}' + timeout: 5 + ttl: 5m + block: + - name: Test with a child token creator + vars: + user_token: '{{ child_token_cmd.result.auth.client_token }}' + block: + - name: Create a child token + register: result + community.hashi_vault.vault_token_create: + + - assert: + that: + - result is changed + - "'login' in result" + - "'auth' in result.login" + - "'client_token' in result.login.auth" + - result.login.auth.client_token != user_token + - "'policies' in 
result.login.auth" + - "'test-policy' in result.login.auth.policies" + - "'default' in result.login.auth.policies" + - result.login.auth.orphan == False + + - name: (xfail) Create an orphan token with orphan=true + register: orphan_result + community.hashi_vault.vault_token_create: + orphan: true + ignore_errors: true + + - assert: + that: + - orphan_result is failed + - orphan_result.msg is search('permission denied') + + - name: (xfail) Create an orphan token with no_parent=true + register: no_parent_result + community.hashi_vault.vault_token_create: + no_parent: true + ignore_errors: true + + - assert: + that: + - no_parent_result is failed + - no_parent_result.msg is search('root or sudo privileges required to create orphan token') + + - name: Test with a orphan token creator + vars: + user_token: '{{ orphan_token_cmd.result.auth.client_token }}' + block: + - name: Create a child token + register: result + community.hashi_vault.vault_token_create: + + - assert: + that: + - result is changed + - "'login' in result" + - "'auth' in result.login" + - "'client_token' in result.login.auth" + - result.login.auth.client_token != user_token + - "'policies' in result.login.auth" + - "'test-policy' in result.login.auth.policies" + - "'default' in result.login.auth.policies" + - result.login.auth.orphan == False + + - name: Create an orphan token with orphan=true + register: result + community.hashi_vault.vault_token_create: + orphan: true + + - assert: + that: + - result is changed + - "'login' in result" + - "'auth' in result.login" + - "'client_token' in result.login.auth" + - result.login.auth.client_token != user_token + - "'policies' in result.login.auth" + - "'test-policy' in result.login.auth.policies" + - "'default' in result.login.auth.policies" + - result.login.auth.orphan == True + + - name: (xfail) Create an orphan token with no_parent=true + register: no_parent_result + community.hashi_vault.vault_token_create: + no_parent: true + ignore_errors: true + + - assert: + that: + - no_parent_result is failed + - no_parent_result.msg is search('root or sudo privileges required to create orphan token') + + - name: Test with a root token + vars: + user_token: '{{ vault_dev_root_token_id }}' + block: + - name: Create a child token + register: result + community.hashi_vault.vault_token_create: + policies: [test-policy] + + - assert: + that: + - result is changed + - "'login' in result" + - "'auth' in result.login" + - "'client_token' in result.login.auth" + - result.login.auth.client_token != user_token + - "'policies' in result.login.auth" + - "'test-policy' in result.login.auth.policies" + - "'default' in result.login.auth.policies" + - result.login.auth.orphan == False + + - name: Create an orphan token with orphan=true + register: result + community.hashi_vault.vault_token_create: + policies: [test-policy] + orphan: true + + - assert: + that: + - result is changed + - "'login' in result" + - "'auth' in result.login" + - "'client_token' in result.login.auth" + - result.login.auth.client_token != user_token + - "'policies' in result.login.auth" + - "'test-policy' in result.login.auth.policies" + - "'default' in result.login.auth.policies" + - result.login.auth.orphan == True + + - name: Create an orphan token with no_parent=true + register: result + community.hashi_vault.vault_token_create: + policies: [test-policy] + no_parent: true + + - assert: + that: + - result is changed + - "'login' in result" + - "'auth' in result.login" + - "'client_token' in result.login.auth" + - 
result.login.auth.client_token != user_token + - "'policies' in result.login.auth" + - "'test-policy' in result.login.auth.policies" + - "'default' in result.login.auth.policies" + - result.login.auth.orphan == True + + - name: Test check mode + register: result + community.hashi_vault.vault_token_create: + id: static_token + check_mode: true + + - assert: + that: + - result is changed + - "'login' in result" + - "'auth' in result.login" + - "'client_token' in result.login.auth" + - result.login.auth.client_token != 'static_token' + - result.login.auth.client_token == None + - >- + lookup('vault_test_auth', + token='static_token', + token_validate=true, + url=vault_test_server_http, + want_exception=True + ) is failed diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/aliases new file mode 100644 index 000000000..7636a9a65 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/aliases @@ -0,0 +1 @@ +context/target diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/meta/main.yml new file mode 100644 index 000000000..290705e5e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - setup_vault_test_plugins diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/tasks/main.yml new file mode 100644 index 000000000..36a8a9203 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- import_tasks: module_vault_write_setup.yml +- import_tasks: module_vault_write_test.yml diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/tasks/module_vault_write_setup.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/tasks/module_vault_write_setup.yml new file mode 100644 index 000000000..53cdbe304 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/tasks/module_vault_write_setup.yml @@ -0,0 +1,12 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: 'Create a test non-root token' + vault_ci_token_create: + # we don't need test policy, but if we don't put something here + # it will inherit the root token's capabilities which is what + # we're trying to avoid. 
+ policies: test-policy + register: user_token_cmd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/tasks/module_vault_write_test.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/tasks/module_vault_write_test.yml new file mode 100644 index 000000000..244b8e29a --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/module_vault_write/tasks/module_vault_write_test.yml @@ -0,0 +1,106 @@ +--- +- name: Var block + vars: + user_token: '{{ user_token_cmd.result.auth.client_token }}' + module_defaults: + vault_ci_read: &defaults + url: '{{ vault_test_server_http }}' + token: '{{ user_token }}' + community.hashi_vault.vault_write: + <<: *defaults + auth_method: token + token_validate: true + timeout: 5 + block: + - name: Write data to the cubbyhole (check mode) + register: result + check_mode: true + community.hashi_vault.vault_write: + path: cubbyhole/secret1 + data: + a: 1 + b: two + + - assert: + that: + - result is changed + - result.data == {} + + - name: Check that written data does not exist + register: result + vault_ci_read: + path: cubbyhole/secret1 + + - assert: + that: + - result.result == None + + - name: Write data to the cubbyhole + register: result + community.hashi_vault.vault_write: + path: cubbyhole/secret1 + data: + a: 1 + b: two + + - assert: + that: + - result is changed + - result.data == {} + + - name: Check that written data exists + register: result + vault_ci_read: + path: cubbyhole/secret1 + + - assert: + that: + - "'result' in result" + - "'data' in result.result" + - "result.result.data == {'a': 1, 'b': 'two'}" + + - name: Write data to an endpoint that returns data and test wrapping + register: result + community.hashi_vault.vault_write: + path: sys/wrapping/wrap + wrap_ttl: 5m + data: + program: kif1 + + - assert: + that: + - result is changed + - "'data' in result" + - "'wrap_info' in result.data" + - result.data.wrap_info.ttl == 300 + + ### failure tests + + - name: Failure expected when erroneous credentials are used + register: test_wrong_cred + community.hashi_vault.vault_write: + path: "cubbyhole/secret2" + token: wrong_token + ignore_errors: true + + - assert: + that: + - test_wrong_cred is failed + - test_wrong_cred.msg is search('Invalid Vault Token') + fail_msg: "Expected failure but got success or wrong failure message." + + - name: Failure expected when unauthorized path is written to + register: test_unauthorized + community.hashi_vault.vault_write: + path: "sys/audit/file" + data: + type: file + options: + file_path: /dev/null + ignore_errors: true + + - assert: + that: + - test_unauthorized is failed + - test_unauthorized.msg is search('Permission Denied') + fail_msg: "Expected failure but got success or wrong failure message." diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/README.md b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/README.md new file mode 100644 index 000000000..48c2b10ba --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/README.md @@ -0,0 +1,2 @@ +# `setup_cert_content` +Tiny role used for writing out the certificate when it was supplied as a string in vars (usually from `integration_config.yml`). Any target testing TLS connectivity that needs to verify the cert will need this. 
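
A minimal sketch of how a TLS-testing target might consume this role, assuming the `vault_cert_file` fact it sets, the collection's `ca_cert` connection option, and the `vault_test_server_https` / `user_token` variables used elsewhere in these tests; the read path is illustrative only:

```yaml
- name: Write the cert from vars and capture its path
  include_role:
    name: setup_cert_content

- name: Read over HTTPS, verifying the server against the written cert
  community.hashi_vault.vault_read:
    url: '{{ vault_test_server_https }}'
    ca_cert: '{{ vault_cert_file }}'
    token: '{{ user_token }}'
    path: cubbyhole/secret1  # illustrative path only
  register: tls_read
```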
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/aliases new file mode 100644 index 000000000..136c05e0d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/aliases @@ -0,0 +1 @@ +hidden diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/defaults/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/defaults/main.yml new file mode 100644 index 000000000..29f4aff6e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/defaults/main.yml @@ -0,0 +1,4 @@ +--- +cert_output_dir: '{{ role_path }}' +cert_file_name: cert.pem +cert_location: '{{ cert_output_dir }}/{{ cert_file_name }}' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/files/.gitignore b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/files/.gitignore new file mode 100644 index 000000000..d6b7ef32c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/files/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/tasks/main.yml new file mode 100644 index 000000000..79c847b74 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_cert_content/tasks/main.yml @@ -0,0 +1,12 @@ +--- +- name: "Cert Content Block" + when: vault_cert_content is defined + block: + - name: "Write Certificate" + copy: + dest: '{{ cert_location }}' + content: '{{ vault_cert_content }}' + + - name: "Register the Cert Location" + set_fact: + vault_cert_file: '{{ cert_location }}' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/README.md b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/README.md new file mode 100644 index 000000000..934df7acc --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/README.md @@ -0,0 +1,7 @@ +# `setup_localenv_docker` +Uses `docker-compose` to set up required external dependencies for integration tests. + +See the guides in the [devel documentation for the latest information](https://docs.ansible.com/ansible/devel/collections/community/hashi_vault/). + +## Notes +* For requirements, see the files in `files/requirements/`. 
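
A hedged sketch of driving the role from a local playbook while overriding a couple of its defaults; the variable names mirror the role's `defaults/main.yml`, and the version pin is an arbitrary example:

```yaml
- hosts: localhost
  gather_facts: no
  roles:
    - role: setup_localenv_docker
      vars:
        vault_version: '1.13.3'  # arbitrary pin; the role defaults to 'latest'
        docker_compose: clean    # down then up; 'none' only templates the files
        vault_port_https: 8300
```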
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/aliases new file mode 100644 index 000000000..fbc7f5062 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/aliases @@ -0,0 +1,2 @@ +hidden +needs/target/setup_vault_server_cert diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/defaults/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/defaults/main.yml new file mode 100644 index 000000000..d17c895f9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/defaults/main.yml @@ -0,0 +1,38 @@ +--- +vault_version: latest +vault_dev_root_token_id: 47542cbc-6bf8-4fba-8eda-02e0a0d29a0a + +docker_compose: clean +# clean - down, then up +# up - bring up the configuration +# down - destroy the configuration +# none - do not take any docker actions (templating of docker-compose.yml still happens) + +docker_compose_project_name: hashi_vault + +vault_port_http: 8200 +vault_port_https: 8300 +vault_container_name: vault +vault_target_name: '{{ vault_container_name }}' + +proxy_port: 8888 +proxy_container_name: tinyproxy +proxy_target_name: '{{ proxy_container_name }}' + +mmock_server_port: 8900 +mmock_console_port: 8901 +mmock_container_name: mmock +mmock_target_name: '{{ mmock_container_name }}' +mmock_config_path: '{{ output_dir }}/mmock_config' + +output_dir: '{{ role_path }}/files/.output' + +docker_compose_output: '{{ output_dir }}/{{ docker_compose_project_name }}' +docker_compose_file: '{{ docker_compose_output }}/docker-compose.yml' + +vault_config_output: '{{ output_dir }}/vault_config' + +vault_cert_file: '{{ vault_config_output }}/cert.pem' +vault_key_file: '{{ vault_config_output }}/key.pem' + +vault_crypto_force: false diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/.output/.gitignore b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/.output/.gitignore new file mode 100644 index 000000000..d6b7ef32c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/.output/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/playbooks/vault_docker.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/playbooks/vault_docker.yml new file mode 100644 index 000000000..6f6ae5ab9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/playbooks/vault_docker.yml @@ -0,0 +1,5 @@ +--- +- hosts: localhost + gather_facts: no + roles: + - setup_localenv_docker diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/requirements/constraints.txt b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/requirements/constraints.txt new file mode 100644 index 000000000..16f5a6645 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/requirements/constraints.txt @@ -0,0 +1,2 @@ +docker >= 5.0.0 ; python_version >= '3.6' +docker < 5.0.0 ; 
python_version == '2.7' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/requirements/requirements.txt b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/requirements/requirements.txt new file mode 100644 index 000000000..de536a9e5 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/requirements/requirements.txt @@ -0,0 +1,3 @@ +docker +docker-compose +six # https://github.com/ansible-collections/community.docker/issues/171 diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/requirements/requirements.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/requirements/requirements.yml new file mode 100644 index 000000000..8114e6169 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/files/requirements/requirements.yml @@ -0,0 +1,6 @@ +--- +collections: + # community.docker is not required if using docker_compose=none + - community.docker + # community.crypto is not required the certificate and key files specified already exist + - community.crypto diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/setup.sh b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/setup.sh new file mode 100755 index 000000000..f2fab1d8b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/setup.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +set -ex + +pushd "${BASH_SOURCE%/*}" + +ANSIBLE_ROLES_PATH="../" \ + ansible-playbook files/playbooks/vault_docker.yml "${@}" + +# copy generated integration_config.yml if it doesn't exist +cp -n files/.output/integration_config.yml ../../ || true + +popd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/tasks/docker.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/tasks/docker.yml new file mode 100644 index 000000000..dc7e0ea63 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/tasks/docker.yml @@ -0,0 +1,13 @@ +--- +- name: "Shut down" + when: docker_compose in ['clean', 'down'] + community.docker.docker_compose: + project_name: '{{ docker_compose_project_name }}' + state: absent + project_src: '{{ docker_compose_output }}' + +- name: "Bring up" + when: docker_compose in ['clean', 'up'] + community.docker.docker_compose: + project_name: '{{ docker_compose_project_name }}' + project_src: '{{ docker_compose_output }}' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/tasks/main.yml new file mode 100644 index 000000000..90223f1e4 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/tasks/main.yml @@ -0,0 +1,70 @@ +--- +- name: "Ensure output dirs exist" + file: + state: directory + path: '{{ item }}' + loop: + - '{{ docker_compose_output }}' + - '{{ vault_config_output }}' + - '{{ mmock_config_path }}' + +- name: "Create the docker-compose definition" + template: + src: docker-compose.yml.j2 + dest: '{{ docker_compose_file }}' + +- name: 
"Persist vars (role_path is relative)" + set_fact: + vault_cert_file: '{{ vault_cert_file }}' + vault_key_file: '{{ vault_key_file }}' + +- name: "Check if cert already exists" + stat: + path: '{{ vault_cert_file }}' + follow: yes + get_attributes: no + get_checksum: no + get_mime: no + register: cert_status + +- name: "Check if key already exists" + stat: + path: '{{ vault_key_file }}' + follow: yes + get_attributes: no + get_checksum: no + get_mime: no + register: key_status + +- name: "Generate certs" + when: >- + vault_crypto_force | bool + or not (key_status.stat.exists and cert_status.stat.exists) + include_role: + name: setup_vault_server_cert + vars: + vault_dns_names: '{{ [vault_target_name, vault_container_name] | unique }}' + +- name: "Template vault config" + template: + src: vault_config.hcl.j2 + dest: '{{ vault_config_output }}/vault_config.hcl' + +- name: "Template mmock configs" + loop: "{{ query('fileglob', role_path ~ '/templates/mmock/*.j2') }}" + loop_control: + label: '{{ dest_name }}' + vars: + dest_name: '{{ item | basename | splitext | first }}' + template: + src: '{{ item }}' + dest: '{{ mmock_config_path }}/{{ dest_name }}' + +- include_tasks: docker.yml + when: docker_compose != 'none' + +- name: "Template integration_config" + template: + src: integration_config.yml.j2 + dest: '{{ output_dir }}/integration_config.yml' + force: yes diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/docker-compose.yml.j2 b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/docker-compose.yml.j2 new file mode 100644 index 000000000..365532c4c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/docker-compose.yml.j2 @@ -0,0 +1,32 @@ +# {{ ansible_managed }} +--- +version: '3' +services: + vault: + image: 'vault:{{ vault_version }}' + container_name: '{{ vault_container_name }}' + ports: + - '{{ vault_port_http }}:{{ vault_port_http }}' + - '{{ vault_port_https }}:{{ vault_port_https }}' + volumes: + - '{{ vault_config_output }}:/vault/config' + environment: + VAULT_DEV_ROOT_TOKEN_ID: '{{ vault_dev_root_token_id }}' + SKIP_CHOWN: 1 + tinyproxy: + image: 'monokal/tinyproxy' + container_name: '{{ proxy_container_name }}' + ports: + - '{{ proxy_port }}:{{ proxy_port }}' + command: ANY + mmock: + image: jordimartin/mmock + container_name: '{{ mmock_container_name }}' + ports: + - '{{ mmock_server_port }}:{{ mmock_server_port }}' + - '{{ mmock_console_port }}:{{ mmock_console_port }}' + volumes: + - '{{ mmock_config_path }}:/config' + command: >- + -console-port {{ mmock_console_port }} + -server-port {{ mmock_server_port }} diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/integration_config.yml.j2 b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/integration_config.yml.j2 new file mode 100644 index 000000000..4add1f4ed --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/integration_config.yml.j2 @@ -0,0 +1,3 @@ +# {{ ansible_managed }} +--- +{{ integration_config | to_nice_yaml }} diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/aws_iam_login_alt_mount.yml.j2 
b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/aws_iam_login_alt_mount.yml.j2 new file mode 100644 index 000000000..fe580ec17 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/aws_iam_login_alt_mount.yml.j2 @@ -0,0 +1,43 @@ +#jinja2:variable_start_string:'[%', variable_end_string:'%]' +--- +request: + method: POST|PUT + path: "/v1/auth/aws-alt/login" +control: + priority: 10 +response: + statusCode: 200 + headers: + Content-Type: + - application/json + body: >- + { + "request_id": "{{fake.UUID}}", + "lease_id": "", + "lease_duration": 0, + "renewable": false, + "data": null, + "warnings": null, + "auth": { + "client_token": "s.{{fake.CharactersN(24)}}", + "accessor": "{{fake.CharactersN(24)}}", + "policies": [ + "default", + "aws-alt-sample-policy" + ], + "token_policies": [ + "default", + "aws-alt-sample-policy" + ], + "identity_policies": null, + "metadata": { + "account_id": "{{fake.digitsN(12)}}", + "auth_type": "iam", + "role_id": "{{fake.UUID}}" + }, + "orphan": true, + "entity_id": "{{fake.UUID}}", + "lease_duration": 1800, + "renewable": true + } + } diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/aws_iam_login_bad_request.yml.j2 b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/aws_iam_login_bad_request.yml.j2 new file mode 100644 index 000000000..94ec11066 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/aws_iam_login_bad_request.yml.j2 @@ -0,0 +1,25 @@ +#jinja2:variable_start_string:'[%', variable_end_string:'%]' +--- +request: + method: POST|PUT + path: "/v1/auth/aws*/login" + body: '*fail-me-role*' +control: + priority: 11 +response: + statusCode: 400 + headers: + Content-Type: + - application/json + body: >- + { + "error": "error making upstream request: received error code 403 from STS: + <ErrorResponse xmlns="https://sts.amazonaws.com/doc/2011-06-15/"> + <Error> + <Type>Sender</Type> + <Code>ExpiredToken</Code> + <Message>The security token included in the request is expired</Message> + </Error> + <RequestId>{{fake.UUID}}</RequestId> + </ErrorResponse>" + } diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/aws_iam_login_default_mount.yml.j2 b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/aws_iam_login_default_mount.yml.j2 new file mode 100644 index 000000000..fa8160e8e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/aws_iam_login_default_mount.yml.j2 @@ -0,0 +1,43 @@ +#jinja2:variable_start_string:'[%', variable_end_string:'%]' +--- +request: + method: POST|PUT + path: "/v1/auth/aws/login" +control: + priority: 10 +response: + statusCode: 200 + headers: + Content-Type: + - application/json + body: >- + { + "request_id": "{{fake.UUID}}", + "lease_id": "", + "lease_duration": 0, + "renewable": false, + "data": null, + "warnings": null, + "auth": { + "client_token": "s.{{fake.CharactersN(24)}}", + "accessor": "{{fake.CharactersN(24)}}", + "policies": [ + "default", + "aws-sample-policy" + ], + "token_policies": [ + "default", + "aws-sample-policy" + ], + "identity_policies": null, + "metadata": { + "account_id": "{{fake.digitsN(12)}}", + 
"auth_type": "iam", + "role_id": "{{fake.UUID}}" + }, + "orphan": true, + "entity_id": "{{fake.UUID}}", + "lease_duration": 1800, + "renewable": true + } + } diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/azure_login_alt_mount.yml.j2 b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/azure_login_alt_mount.yml.j2 new file mode 100644 index 000000000..b1588fd6e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/azure_login_alt_mount.yml.j2 @@ -0,0 +1,46 @@ +#jinja2:variable_start_string:'[%', variable_end_string:'%]' +# Copyright (c) 2022 Junrui Chen (@jchenship) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later +--- +request: + method: POST|PUT + path: "/v1/auth/azure-alt/login" +control: + priority: 10 +response: + statusCode: 200 + headers: + Content-Type: + - application/json + body: >- + { + "request_id": "{{fake.UUID}}", + "lease_id": "", + "lease_duration": 0, + "renewable": false, + "data": null, + "warnings": null, + "auth": { + "client_token": "s.{{fake.CharactersN(24)}}", + "accessor": "{{fake.CharactersN(24)}}", + "policies": [ + "default", + "azure-alt-sample-policy" + ], + "token_policies": [ + "default", + "azure-alt-sample-policy" + ], + "identity_policies": null, + "metadata": { + "role": "vault-role", + "resource_group_name": "", + "subscription_id": "" + }, + "orphan": true, + "entity_id": "{{fake.UUID}}", + "lease_duration": 1800, + "renewable": true + } + } diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/azure_login_bad_request.yml.j2 b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/azure_login_bad_request.yml.j2 new file mode 100644 index 000000000..d447dd015 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/azure_login_bad_request.yml.j2 @@ -0,0 +1,22 @@ +#jinja2:variable_start_string:'[%', variable_end_string:'%]' +# Copyright (c) 2022 Junrui Chen (@jchenship) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later +--- +request: + method: POST|PUT + path: "/v1/auth/azure*/login" + body: '*fail-me-role*' +control: + priority: 11 +response: + statusCode: 400 + headers: + Content-Type: + - application/json + body: >- + { + "errors": [ + "oidc: expected audience \"https://management.azure.com/\" got [\"https://management.azure.com\"]" + ] + } diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/azure_login_default_mount.yml.j2 b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/azure_login_default_mount.yml.j2 new file mode 100644 index 000000000..af26ada83 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/azure_login_default_mount.yml.j2 @@ -0,0 +1,46 @@ +#jinja2:variable_start_string:'[%', variable_end_string:'%]' +# Copyright (c) 2022 Junrui Chen (@jchenship) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or 
https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later +--- +request: + method: POST|PUT + path: "/v1/auth/azure/login" +control: + priority: 10 +response: + statusCode: 200 + headers: + Content-Type: + - application/json + body: >- + { + "request_id": "{{fake.UUID}}", + "lease_id": "", + "lease_duration": 0, + "renewable": false, + "data": null, + "warnings": null, + "auth": { + "client_token": "s.{{fake.CharactersN(24)}}", + "accessor": "{{fake.CharactersN(24)}}", + "policies": [ + "default", + "azure-sample-policy" + ], + "token_policies": [ + "default", + "azure-sample-policy" + ], + "identity_policies": null, + "metadata": { + "role": "vault-role", + "resource_group_name": "", + "subscription_id": "" + }, + "orphan": true, + "entity_id": "{{fake.UUID}}", + "lease_duration": 1800, + "renewable": true + } + } diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/ldap_login_alt_mount.yml.j2 b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/ldap_login_alt_mount.yml.j2 new file mode 100644 index 000000000..30b3e7074 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/ldap_login_alt_mount.yml.j2 @@ -0,0 +1,41 @@ +#jinja2:variable_start_string:'[%', variable_end_string:'%]' +--- +request: + method: POST|PUT + path: "/v1/auth/ldap-alt/login/:user" +control: + priority: 10 +response: + statusCode: 200 + headers: + Content-Type: + - application/json + body: >- + { + "request_id": "{{fake.UUID}}", + "lease_id": "", + "lease_duration": 0, + "renewable": false, + "data": {}, + "warnings": null, + "auth": { + "client_token": "s.{{fake.CharactersN(24)}}", + "accessor": "{{fake.CharactersN(24)}}", + "policies": [ + "default", + "ldap-alt-sample-policy" + ], + "token_policies": [ + "default", + "ldap-alt-sample-policy" + ], + "identity_policies": null, + "metadata": { + "username": "{{request.path.user}}" + }, + "orphan": true, + "entity_id": "{{fake.UUID}}", + "lease_duration": 3600, + "renewable": true + } + } diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/ldap_login_bad_request.yml.j2 b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/ldap_login_bad_request.yml.j2 new file mode 100644 index 000000000..7f4fe39c5 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/ldap_login_bad_request.yml.j2 @@ -0,0 +1,18 @@ +#jinja2:variable_start_string:'[%', variable_end_string:'%]' +--- +request: + method: POST|PUT + path: "/v1/auth/ldap*/login/fail-me-username" +control: + priority: 11 +response: + statusCode: 400 + headers: + Content-Type: + - application/json + body: >- + { + "errors": [ + "ldap operation failed: failed to bind as user" + ] + } diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/ldap_login_default_mount.yml.j2 b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/ldap_login_default_mount.yml.j2 new file mode 100644 index 000000000..8210f6065 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/ldap_login_default_mount.yml.j2 @@ -0,0 +1,41 @@ +#jinja2:variable_start_string:'[%', 
variable_end_string:'%]' +--- +request: + method: POST|PUT + path: "/v1/auth/ldap/login/:user" +control: + priority: 10 +response: + statusCode: 200 + headers: + Content-Type: + - application/json + body: >- + { + "request_id": "{{fake.UUID}}", + "lease_id": "", + "lease_duration": 0, + "renewable": false, + "data": {}, + "warnings": null, + "auth": { + "client_token": "s.{{fake.CharactersN(24)}}", + "accessor": "{{fake.CharactersN(24)}}", + "policies": [ + "default", + "ldap-sample-policy" + ], + "token_policies": [ + "default", + "ldap-sample-policy" + ], + "identity_policies": null, + "metadata": { + "username": "{{request.path.user}}" + }, + "orphan": true, + "entity_id": "{{fake.UUID}}", + "lease_duration": 3600, + "renewable": true + } + } diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/proxy.yml.j2 b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/proxy.yml.j2 new file mode 100644 index 000000000..f947a3897 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/mmock/proxy.yml.j2 @@ -0,0 +1,7 @@ +--- +request: + method: 'GET|HEAD|POST|PUT|DELETE|OPTIONS' + path: '/v1/*' +control: + priority: 1 + proxyBaseUrl: '{{ integration_config.vault_test_server_http }}/v1' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/vault_config.hcl.j2 b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/vault_config.hcl.j2 new file mode 100644 index 000000000..ec89c20e1 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/templates/vault_config.hcl.j2 @@ -0,0 +1,10 @@ +# {{ ansible_managed }} +listener "tcp" { + tls_key_file = "/vault/config/{{ vault_key_file | basename }}" + tls_cert_file = "/vault/config/{{ vault_cert_file | basename }}" + tls_disable = false + address = "{{ + integration_config.vault_test_server_https + | regex_replace('^https://([^:]+):(\\d+).*?$', '\\1:\\2') + }}" +} diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/vars/local_client.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/vars/local_client.yml new file mode 100644 index 000000000..96b718756 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/vars/local_client.yml @@ -0,0 +1,15 @@ +# WIP: experimental/broken +# a configuration for running the tests locally (outside the container network) +--- +vault_target_name: localhost +proxy_target_name: localhost + +integration_config: + vault_version: '{{ vault_version }}' + vault_test_server_http: 'http://{{ vault_target_name }}:{{ vault_port_http }}' + vault_test_server_https: 'https://{{ vault_target_name }}:{{ vault_port_https}}' + vault_dev_root_token_id: '{{ vault_dev_root_token_id }}' + vault_proxy_server: 'http://{{ proxy_target_name }}:{{ proxy_port }}' + vault_cert_content: "{{ lookup('file', vault_cert_file) }}" + vault_proxy_alt_vault_http: 'http://{{ vault_container_name }}:{{ vault_port_http }}' + vault_proxy_alt_vault_https: 'https://{{ vault_container_name }}:{{ vault_port_https }}' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/vars/main.yml 
b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/vars/main.yml new file mode 100644 index 000000000..fd0a0c36b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_docker/vars/main.yml @@ -0,0 +1,9 @@ +--- +integration_config: + vault_version: '{{ vault_version }}' + vault_test_server_http: 'http://{{ vault_target_name }}:{{ vault_port_http }}' + vault_test_server_https: 'https://{{ vault_target_name }}:{{ vault_port_https}}' + vault_dev_root_token_id: '{{ vault_dev_root_token_id }}' + vault_proxy_server: 'http://{{ proxy_target_name }}:{{ proxy_port }}' + vault_cert_content: "{{ lookup('file', vault_cert_file) }}" + vault_mmock_server_http: 'http://{{ mmock_target_name }}:{{ mmock_server_port }}' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/README.md b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/README.md new file mode 100644 index 000000000..1f9641e3b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/README.md @@ -0,0 +1,4 @@ +# `setup_localenv_gha` +A special case "localenv" role that isn't for end-user use, but rather specifically for use in the collection's GitHub Actions CI. + +It uses `setup_localenv_docker` but with pre-generated PKI and directly uses the `docker-compose` CLI in CI, in order to avoid additional dependencies and cut down execution time. diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/aliases new file mode 100644 index 000000000..80fa188f1 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/aliases @@ -0,0 +1,2 @@ +hidden +needs/target/setup_localenv_docker diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/defaults/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/defaults/main.yml new file mode 100644 index 000000000..5f04a155c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/defaults/main.yml @@ -0,0 +1,4 @@ +--- +output_dir: '{{ role_path }}/files/.output' +docker_compose_project_name: hashi_vault +docker_compose: none diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/files/.gitignore b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/files/.gitignore new file mode 100644 index 000000000..bcb1622af --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/files/.gitignore @@ -0,0 +1,2 @@ +.output/ +!.output/vault_config/*.pem diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/files/.output/vault_config/cert.pem b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/files/.output/vault_config/cert.pem new file mode 100644 index 000000000..42c72e891 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/files/.output/vault_config/cert.pem @@ -0,0 +1,29 @@ +-----BEGIN CERTIFICATE----- +MIIE4TCCAsmgAwIBAgIUIL77ChgfiJcegZViZh8TV9KviB8wDQYJKoZIhvcNAQEL 
+BQAwEDEOMAwGA1UEAwwFdmF1bHQwHhcNMjEwNzAxMTgyNjIzWhcNMzEwNjI5MTgy +NjIzWjAQMQ4wDAYDVQQDDAV2YXVsdDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCC +AgoCggIBAK1mI97qE0Pijd0vYrepaoZen/7yuayVslJ1uKzNQWCwHCpaAG9QuGof +ym2N4UnCnBAmpcOH8N+xVzFR5oQVJwYLodfPeI8VDcHvN6Cj011Lb40o2fAbQz+v +2Xz4MOpEzRbCrSlMZa7M10+iiZJpU379Yqn6JDiCkwrq4dIBVUnp8Wg5ykV+qdUz +ypB6jJF67WRisXqYp7hACL67FRlj/r8+76FSUg/oAo7g0rkrahWb+SvNMPWS1hS7 +Tk8Tjf6qb2ZO2Iwx0eEY2GNoziUzQu1xTkZQhbQt3vt8ZiWvddCRxCz5W1cfKWO/ +0XQlMlqlW2RmfxEouMUqhQlH1NSYCKas12RFxxSaiPI2Idq2Kve2xDLwQuIOoCLD +joqSQmyF88f9Jxb0l2sXXsro/YwmYJ+qwz81QW4BO5LzX7BHT7EuiV97m8kLuBzE +0pjat0XVJ9fzmfGYX89uiwnO/fb7jZubjQLus0cmZXHMB6wK0fo8bTXgi9TLXwbi +wymDG+A+jlSShf7aE3vZtr6fRcacjwh6Y6DFbfxdVV9Vxzv1aHaMUYwtIu+d+uVC +cTQbwou6B7hS0BUdXQKtM1mjPDAwcdgz//TLzk26tIsx69AXtOREKb9W1ffIbfGh +B8nrdmI6+80tlc8KL6s+/cMLEMya3K2GZPloAw+CO00ihO7SGC6VAgMBAAGjMzAx +MBAGA1UdEQQJMAeCBXZhdWx0MB0GA1UdDgQWBBRbYPX83fHK2QUTsWExMQESmS6B +yTANBgkqhkiG9w0BAQsFAAOCAgEAc9slz9up7xd3bsr+q/kCoDt+w6rm/dc6ONSJ +PITZAbuWtRBtCJStQuie5ZICnh1X0IajhczIFVcD9CjxOIxfxA7S49gL9vDHVpiJ +K4nW0KR3Zviq2XwtHYAs99CZH63EUTVqz0nEuMu10H/0PCFPtTHcXFpgovCLRAGH +HqnM7LVeM7a0g85Zt+HXuPJ2MThlEyIBy64MBPIczSiGDVx0cQwe1LJREkQJgB4F ++3iAOPIsHAWkApFfx2cyq+L4sEd0EdxUFk1mw4sni/VFzK8wcd3L7gEgseKSk2Kz +Z3JZiAXessjoa26JL0/KBSN6LTB3/pdn/dG7lz7DUr17PymbggRLVjdbSsbAFD9S +BTGgt3kFjrXIeNAyqGodK673R5jACXjz0vuEePJh4Vk/ffl953bH8Xhs/BZyNXBC +meOu/sU93MGPi0vqE+Jdjplvj5smLzOW9Y6HAAitDqHTQ0sNdZP2DdT6FkSLSzMx +ErPFBePgxhQFXvQt8h11Gadox5vsm1Ca2nLGClKWRt458goFEWgmmDA+mOD2/sJ0 +eYdGfBgN/ZnTzD2y2z18Sd9H2Zb4HZbfpPvvFwR+5oYMqE72Rz3oj8APt2f/Eq5B +WYNpi1fRCGPXhM2wNr2DKPKcoqbRcIVTxD/E0MATmxQRGtPaO/JbFcY5v37qcLWf +Jb3iD/U= +-----END CERTIFICATE----- diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/files/.output/vault_config/key.pem b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/files/.output/vault_config/key.pem new file mode 100644 index 000000000..cdfc143fa --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/files/.output/vault_config/key.pem @@ -0,0 +1,51 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIJJwIBAAKCAgEArWYj3uoTQ+KN3S9it6lqhl6f/vK5rJWyUnW4rM1BYLAcKloA +b1C4ah/KbY3hScKcECalw4fw37FXMVHmhBUnBguh1894jxUNwe83oKPTXUtvjSjZ +8BtDP6/ZfPgw6kTNFsKtKUxlrszXT6KJkmlTfv1iqfokOIKTCurh0gFVSenxaDnK +RX6p1TPKkHqMkXrtZGKxepinuEAIvrsVGWP+vz7voVJSD+gCjuDSuStqFZv5K80w +9ZLWFLtOTxON/qpvZk7YjDHR4RjYY2jOJTNC7XFORlCFtC3e+3xmJa910JHELPlb +Vx8pY7/RdCUyWqVbZGZ/ESi4xSqFCUfU1JgIpqzXZEXHFJqI8jYh2rYq97bEMvBC +4g6gIsOOipJCbIXzx/0nFvSXaxdeyuj9jCZgn6rDPzVBbgE7kvNfsEdPsS6JX3ub +yQu4HMTSmNq3RdUn1/OZ8Zhfz26LCc799vuNm5uNAu6zRyZlccwHrArR+jxtNeCL +1MtfBuLDKYMb4D6OVJKF/toTe9m2vp9FxpyPCHpjoMVt/F1VX1XHO/VodoxRjC0i +75365UJxNBvCi7oHuFLQFR1dAq0zWaM8MDBx2DP/9MvOTbq0izHr0Be05EQpv1bV +98ht8aEHyet2Yjr7zS2Vzwovqz79wwsQzJrcrYZk+WgDD4I7TSKE7tIYLpUCAwEA +AQKCAgBRFMLPOJs7khOOCttZUEH8hQDBoVRLLFCPGHb7bpsUkCULdLxhUNh631Sz +t7VR3UtGjhvS+50ZrH4+FLL6rj1qKURLertABLNDLQ3Q8uoh5OVLl3+ZM9ZVUHAd +bJzK3tMbwnpgJlYhz60aksFLki614dwh6VLIjd4eK8jefzsXbeoxN1yq9FiL1kTx +HCzg1h9tYmzlC77ZfC/ap/ZswgJcARziTXEL+QxBLUW7yl43rpBr9+3d1wR9+zvZ +5CXwoFeo+lNt8tnWLNz6VwaWBItWJjZY0MxEfXKYHApTsuEfXxnhgdRBP9QFzasu +aTpLgqjioL7oEiecIo7E0S7nHhRkvmhl/yZlI8PcneK7kRbobJJ7mNr65PA2yQIj +a4hMNr9bvYMZqkhqkXe5vFxmUBhc7MfoOFKP1wHHrEc1Gv4hNl/RXO8G72pN/otf +OdRCVluveEdUfjfjo10ptN/hqmnTs5ryz2QfISetpnDbebgPB9pgBYOEb27pdxEf +mLPH+dLVDAEFQRnLqAHIB7P1NtfsCgSmhbrIHvDO6qm39NOtmeJBvHzm/aC0JEfI 
+CeDMA80zB5nDsHKztL65VTDQ5okTtfMkAuzXhDEGprEnk2hi4LYWurvEgdE01rL0 +Yk/FCH4Ae+YSd/yqBjTaDJ2kkwGeyR562i7AOf4KoECSdwdqnQKCAQEA35dXubjy +BXfbNewCiK4I0wkkD88QFiSAZBQi3mK+1ZI6eB58Rvrq/dE226fw2fnzuV6mYQoD +/mok3hpHM7MTtCg9vMvMq4sW47cpjYsMxHLIYvpQxfJ74Ja/vlpxAa9EJDhClKQu +1vmkNqpxdmqOLM7EQgjJ7q5WVm528wPBMcoO/MaVNGLj0Mw63NIiX7i0QldVnc1r +oMRN6HVpXrPqBNUjUtOxXAFdfVc/ecm8PbS0UCGQxS0rL/m0/nFuxDwPQ6sdgCsr +FgMSvRS/aWuDvHfwicCLSOSAwtLD7gyk9/li3R62sP9ZmH54eqMaOOYbYZIVrRDZ +fIdfi67dwz1hSwKCAQEAxohY3tqmDUspah/UHofn2gqIuK8RIkRzrHvDXu919fzn +LEbN+aeWR6nFPPU3N3YCSS8GRxC6BK0AiCTtjoSpzc7PY1X7EZ2LVNYvLTLwG2Q5 +JEkk2z7Sh3Ckb3C4RXlYGO/VueS8uduJKllC8gDILaWt4CSDFt76ftwW1Ykd0z6Y +ttpcEkeOXwfvq0MKTh60584A7+WeWN2SnQ5Nd6yZ2UdMvHs9AA7Z2pI0cPqKZTMJ +/C0F/hjVlK3IzKNXhySITCPfLCc8zG+NdcTcMq9oORg4iSWi6vfdBCKAoxl/tZSd +NMBSIXF2Et8F2dGOQXctoktot1DJ3D/ZDaQp8FKjnwKCAQA/ptUJgTYdCmb0bdC2 +2lyWv3ZtrbPVqeSHGRhoGNJc+Hj+sycMCeiWA1ZLp/6v/zE8J07UN0a1yb/fHroA +ZM2KLx6MPbRxbXJBt2Xz7o7e6hJT4xDVmVdcs224ogSjxyJt8To7GJZhCsaN7W3J +mUIKTX6fkeLyuNzJVD82pW39X5luqpXVSQWz7kAflw7EE0/1xukPuGgYZ2oKcEpj +9HH9mDOrI6rNF3Jm+UyPvjeBlYzzdurAx0ARM1QsT1yKLtLln7QRRbjJVmyiOWLY +fwwMvlRS0uaUBMEHNXsjrKPWGD/l1RWLQ9ZczDw+JWqUoiVPXIghMbfIdL1lhY2K +RFILAoIBAFcM8erFdInfqwkda6T3cnHorIpOOPUNjqrSmZfG57I0cii34xjUotBJ +YHKaEtv+ooH0XIxMiUQDl3gauQ8EnG/hfo3P5YaTzcQlJgpri1x3VZbTe5Wmtiks ++uziP+o2iqpkfxJDeX0FBd/Lw87ZCOz6+IKPf+tHWg7F8j0vIiS+Dbgfhr99ILAK +isxNWPdn/2qkqUSHKidNADxdSRpwVAUxfjQm1VhxKpTsBpKDSKuW9YndNAN2YDAR +Azq3ZGmdWyYHlJOHoOEDCMPazbJrtwdR13hLSGQ+ympFtqHNTHIR99xpd5myZlNf +9vDLMdUCrL+80O2QPvKUDFgu4zS9B4ECggEAEx1/ZRmiVuWNYtQcAvK4Ub95gitY +1EG6AsdNVrv5AwMK7lab+WWhIxASHhge9yPGuQ0DAEVGkGkSsEOjHlnydT6L9lfE +s3vx1oT6xdDQsgRpNSocbrE8RP4p+Uvce7PfDofnCzjWnfv7YsRtASPxjjZe70uP +4RweQaXq34dPoJ0ZeVI46FfIfNz8AicbRUyS7oow9cWkyhq4GD8Ch8a7DruP7KDX +2UCF9iwv6wwoqvxIeL3fHkdpsZ26+bie65RPsdxPY6F83vJa9AXr//grAAd44EKz +x/nAN0TijTDbbqBbYipUOMghC8sJlmPQNpypqNklXb0TywNvgcuxw6IzAg== +-----END RSA PRIVATE KEY----- diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/files/playbooks/gha.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/files/playbooks/gha.yml new file mode 100644 index 000000000..aea370d3f --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/files/playbooks/gha.yml @@ -0,0 +1,5 @@ +--- +- hosts: localhost + gather_facts: no + roles: + - setup_localenv_gha diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/setup.sh b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/setup.sh new file mode 100755 index 000000000..5317aa407 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/setup.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +set -ex + +pushd "${BASH_SOURCE%/*}" + +ANSIBLE_ROLES_PATH="../" \ + ansible-playbook files/playbooks/gha.yml "${@}" + +# launch containers +files/.output/launch.sh + +# copy generated integration_config.yml if it doesn't exist +cp -n files/.output/integration_config.yml ../../ || true + +popd diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/tasks/main.yml new file mode 100644 index 000000000..f241aafaa --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/tasks/main.yml @@ -0,0 
+1,15 @@ +--- +- name: "Persist defaults" + set_fact: + output_dir: '{{ output_dir }}' + docker_compose: '{{ docker_compose }}' + docker_compose_project_name: '{{ docker_compose_project_name }}' + +- import_role: + name: setup_localenv_docker + +- name: "Template the launch script" + template: + src: launch.sh.j2 + dest: '{{ output_dir }}/launch.sh' + mode: '+x' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/templates/launch.sh.j2 b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/templates/launch.sh.j2 new file mode 100644 index 000000000..6b89f2f26 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_localenv_gha/templates/launch.sh.j2 @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +docker-compose -f "{{ docker_compose_file }}" up -d diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/README.md b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/README.md new file mode 100644 index 000000000..8b2382577 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/README.md @@ -0,0 +1,2 @@ +# `setup_vault_configure` +Performs initial configuration of the Vault server with basic things intended to be used by many tests, such as a variety of kv secrets. Individual auth methods and other targets are responsible for their own setup. diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/aliases new file mode 100644 index 000000000..4b3a017fd --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/aliases @@ -0,0 +1,2 @@ +hidden +needs/target/setup_vault_test_plugins diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/meta/main.yml new file mode 100644 index 000000000..290705e5e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - setup_vault_test_plugins diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/tasks/configure.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/tasks/configure.yml new file mode 100644 index 000000000..2b6694e65 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/tasks/configure.yml @@ -0,0 +1,133 @@ +--- +- name: 'Create KV v1 secrets engine' + vault_ci_enable_engine: + backend_type: kv + path: '{{ vault_kv1_mount_point }}' + options: + version: 1 + +- name: 'Create KV v2 secrets engine' + vault_ci_enable_engine: + backend_type: kv + path: '{{ vault_kv2_mount_point }}' + options: + version: 2 + +- name: 'Create KV v2 secrets engine to test unauthorized access' + vault_ci_enable_engine: + backend_type: kv + path: '{{ unauthorized_vault_kv2_mount_point }}' + options: + version: 2 + +- name: 'Create KV v2 secrets engine for cas_required=True' + vault_ci_enable_engine: + backend_type: kv + path: '{{ cas_required_vault_kv2_mount_point }}' + options: + version: 2 + +- name: Set cas_required=True + 
vault_ci_write: + path: "{{ cas_required_vault_kv2_mount_point }}/config" + data: + cas_required: true + +- name: Create a test policy + vault_ci_policy_put: + name: test-policy + policy: "{{ vault_test_policy }}" + +- name: Create an alternate policy + vault_ci_policy_put: + name: alt-policy + policy: "{{ vault_alt_policy }}" + +- name: Create a token creator policy + vault_ci_policy_put: + name: token-creator + policy: '{{ vault_token_creator_policy }}' + +- name: Create an orphan creator policy + vault_ci_policy_put: + name: orphan-creator + policy: '{{ vault_orphan_creator_policy }}' + +- name: Create a policy allowing access to invalid kv2 paths + vault_ci_policy_put: + name: invalid-kv2 + policy: '{{ vault_invalid_kv2_path_policy }}' + +- name: 'Create KV v1 secrets' + loop: [1, 2, 3] + vault_ci_kv_put: + path: "{{ vault_kv1_path }}/secret{{ item }}" + version: 1 + mount_point: '{{ vault_kv1_mount_point }}' + secret: + value: 'foo{{ item }}' + +- name: 'Create KV v2 secrets' + loop: [1, 2, 3, 4, 5] + vault_ci_kv_put: + path: "{{ vault_kv2_path }}/secret{{ item }}" + version: 2 + mount_point: '{{ vault_kv2_mount_point }}' + secret: + value: 'foo{{ item }}' + +- name: 'Create KV v2 secrets in unauthorized path' + loop: [1, 2, 3, 4, 5] + vault_ci_kv_put: + path: "{{ vault_kv2_path }}/secret{{ item }}" + version: 2 + mount_point: '{{ unauthorized_vault_kv2_mount_point }}' + secret: + value: 'foo{{ item }}' + +- name: 'Update KV v2 secret4 with new value to create version' + vault_ci_kv_put: + path: "{{ vault_kv2_path }}/secret4" + version: 2 + mount_point: '{{ vault_kv2_mount_point }}' + secret: + value: 'foo5' + +- name: 'Create multiple KV v2 secrets under one path' + vault_ci_kv_put: + path: "{{ vault_kv2_multi_path }}/secrets" + version: 2 + mount_point: '{{ vault_kv2_mount_point }}' + secret: + value1: foo1 + value2: foo2 + value3: foo3 + +- name: Remove existing two-versioned secret + vault_ci_kv2_destroy_all: + mount_point: '{{ vault_kv2_mount_point }}' + path: '{{ vault_kv2_versioned_path }}/twover' + +- name: Set up a two-versioned secret (v1) + vault_ci_kv_put: + version: 2 + mount_point: '{{ vault_kv2_mount_point }}' + path: '{{ vault_kv2_versioned_path }}/twover' + secret: + a: one + v: 1 + +- name: Set up a two-versioned secret (v2) + vault_ci_kv_put: + version: 2 + mount_point: '{{ vault_kv2_mount_point }}' + path: '{{ vault_kv2_versioned_path }}/twover' + secret: + a: two + v: 2 + +- name: 'Write Canary' + vault_ci_write: + path: '{{ vault_configure_canary.path }}' + data: + value: '{{ vault_configure_canary.value }}' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/tasks/main.yml new file mode 100644 index 000000000..200db9811 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/tasks/main.yml @@ -0,0 +1,18 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_enable_engine: '{{ vault_plugins_module_defaults_common }}' + vault_ci_kv_put: '{{ vault_plugins_module_defaults_common }}' + vault_ci_kv2_destroy_all: '{{ vault_plugins_module_defaults_common }}' + vault_ci_policy_put: '{{ vault_plugins_module_defaults_common }}' + vault_ci_read: '{{ vault_plugins_module_defaults_common }}' + vault_ci_write: '{{ vault_plugins_module_defaults_common }}' + block: + - name: 'Canary for Vault basic setup' + vault_ci_read: + path: '{{ 
vault_configure_canary.path }}' + register: canary + + - name: 'Configure Vault basic setup' + include_tasks: configure.yml + when: canary.result is none diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/vars/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/vars/main.yml new file mode 100644 index 000000000..c5880105c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure/vars/main.yml @@ -0,0 +1,164 @@ +--- +vault_configure_canary: + path: cubbyhole/configure_basic + value: complete # value does not matter + +vault_kv1_mount_point: kv1 +vault_kv1_path: testproject +vault_kv1_api_path: '{{ vault_kv1_mount_point }}/{{ vault_kv1_path }}' + +vault_kv2_mount_point: kv2 +unauthorized_vault_kv2_mount_point: kv2_noauth +cas_required_vault_kv2_mount_point: kv2_cas + +vault_kv2_path: testproject +vault_kv2_multi_path: testmulti +vault_kv2_versioned_path: versioned + +vault_kv2_api_path: '{{ vault_kv2_mount_point }}/data/{{ vault_kv2_path }}' +vault_kv2_multi_api_path: '{{ vault_kv2_mount_point }}/data/{{ vault_kv2_multi_path }}' +vault_kv2_versioned_api_path: '{{ vault_kv2_mount_point }}/data/{{ vault_kv2_versioned_path }}' +vault_kv2_delete_api_path: '{{ vault_kv2_mount_point }}/delete/{{ vault_kv2_versioned_path }}' +vault_kv2_metadata_api_path: '{{ vault_kv2_mount_point }}/metadata/{{ vault_kv2_versioned_path }}' +vault_kv2_api_list_mount_point: '{{ vault_kv2_mount_point }}/metadata' +vault_kv2_api_list_path: '{{ vault_kv2_mount_point }}/metadata/{{ vault_kv2_path }}' + +vault_policy_api_list_path: 'sys/policies/acl' + +vault_kv2_api_list_inexistent_path: '{{ vault_kv2_mount_point }}/metadata/__inexistent' +vault_kv2_api_list_inexistent_mount_point: '{{ vault_kv2_mount_point }}__inexistent/metadata' +vault_kv2_api_list_inexistent_unauthorized_path: '{{ vault_kv2_mount_point }}/metadata/__inexistent_no_auth' +vault_kv2_api_list_unauthorized_path: '{{ unauthorized_vault_kv2_mount_point }}/metadata' + +vault_base_policy: | + path "{{ vault_kv1_api_path }}/secret1" { + capabilities = ["read"] + } + path "{{ vault_kv1_api_path }}/secret2" { + capabilities = ["read", "update"] + } + path "{{ vault_kv1_api_path }}/secret3" { + capabilities = ["deny"] + } + path "{{ vault_kv1_api_path }}/non_existent_secret" { + capabilities = ["read"] + } + + path "{{ vault_kv2_api_path }}/secret1" { + capabilities = ["read"] + } + path "{{ vault_kv2_api_path }}/secret2" { + capabilities = ["read", "update"] + } + path "{{ vault_kv2_api_path }}/secret3" { + capabilities = ["deny"] + } + path "{{ vault_kv2_multi_api_path }}/secrets" { + capabilities = ["read"] + } + path "{{ vault_kv2_api_path }}/secret4" { + capabilities = ["read", "update"] + } + # module_vault_kv2_write tests + path "{{ vault_kv2_api_path }}/write1" { + capabilities = ["read", "create", "update"] + } + # module_vault_kv2_write tests + path "{{ vault_kv2_api_path }}/readonly" { + capabilities = ["read"] + } + # module_vault_kv2_write tests + path "{{ cas_required_vault_kv2_mount_point }}/data/write1" { + capabilities = ["read", "create", "update"] + } + # module_vault_kv2_write tests + path "{{ vault_kv2_api_path }}/deny" { + capabilities = ["deny"] + } + # module_vault_kv2_write tests + path "{{ vault_kv2_api_path }}/404" { + capabilities = ["read", "create", "update"] + } + # module_vault_kv2_write tests + path "{{ vault_kv2_api_path }}/writeonly" { + capabilities = ["create", "update"] + } + 
path "{{ vault_kv2_mount_point }}/metadata/{{ vault_kv2_path }}/write1" { + capabilities = ["delete"] + } + path "{{ vault_kv2_api_path }}/non_existent_secret" { + capabilities = ["read"] + } + path "{{ vault_kv2_versioned_api_path }}/*" { + capabilities = ["read"] + } + path "{{ vault_kv2_versioned_api_path }}/secret6" { + capabilities = ["delete"] + } + path "{{ vault_kv2_versioned_api_path }}/non_existent_secret" { + capabilities = ["delete"] + } + path "{{ vault_kv2_delete_api_path }}/secret6" { + capabilities = ["create", "update"] + } + path "{{ vault_kv2_delete_api_path }}/non_existent_secret" { + capabilities = ["create", "update"] + } + path "{{ vault_kv2_metadata_api_path }}/secret6" { + capabilities = ["read"] + } + path "{{ vault_kv2_api_list_mount_point }}/*" { + capabilities = ["list"] + } + path "{{ vault_kv2_api_list_path }}" { + capabilities = ["list"] + } + path "{{ vault_policy_api_list_path }}" { + capabilities = ["list"] + } + path "{{ vault_kv2_api_list_inexistent_unauthorized_path }}" { + capabilities = ["deny"] + } + path "{{ vault_kv2_api_list_unauthorized_path }}" { + capabilities = ["deny"] + } + +vault_token_creator_policy: | + path "auth/token/create" { + capabilities = ["create", "update"] + } + path "auth/token/create/*" { + capabilities = ["create", "update"] + } + +vault_orphan_creator_policy: | + path "auth/token/create" { + capabilities = ["create", "update"] + } + path "auth/token/create/*" { + capabilities = ["create", "update"] + } + path "auth/token/create-orphan" { + capabilities = ["create", "update", "sudo"] + } + +vault_invalid_kv2_path_policy: | + path "{{ vault_kv2_mount_point }}/{{ vault_kv2_path }}/*" { + capabilities = ["read"] + } + +# the purpose of these policies is to catch when the plugin accepts mount_point but does not pass +# it into hvac. we set the test policy to deny access to this secret and the alt policy to read it +# the test-policy is assigned to the default mount of the auth method, while the alt policy is +# assigned to the alternate (non-default) mount +vault_test_policy: | + {{ vault_base_policy }} + path "{{ vault_kv2_api_path }}/secret5" { + capabilities = ["deny"] + } + +vault_alt_policy: | + {{ vault_base_policy }} + path "{{ vault_kv2_api_path }}/secret5" { + capabilities = ["read"] + } diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/README.md b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/README.md new file mode 100644 index 000000000..44a6b9e29 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/README.md @@ -0,0 +1,2 @@ +# `setup_vault_configure_engine_pki` +Performs configuration of the PKI engine in Vault. 
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/aliases new file mode 100644 index 000000000..4b3a017fd --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/aliases @@ -0,0 +1,2 @@ +hidden +needs/target/setup_vault_test_plugins diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/meta/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/meta/main.yml new file mode 100644 index 000000000..290705e5e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - setup_vault_test_plugins diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/tasks/configure.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/tasks/configure.yml new file mode 100644 index 000000000..c96e22ebe --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/tasks/configure.yml @@ -0,0 +1,44 @@ +--- +- name: Create PKI secrets engine + vault_ci_enable_engine: + backend_type: pki + +- name: Generate self-signed root CA + vault_ci_write: + path: /pki/root/generate/internal + data: + common_name: ca.example.org + +- name: Configure URL values for issue certificate endpoints + vault_ci_write: + path: /pki/config/urls + data: + issuing_certificates: http://myvault:8200/v1/pki/ca + crl_distribution_points: http://myvault:8200/v1/pki/crl + +- name: Creating test role + vault_ci_write: + path: /pki/roles/test.example.org + data: + allowed_domains: test.example.org + allow_subdomains: true + max_ttl: 24h + +- name: Create a test policy + vault_ci_policy_put: + name: test-pki-policy + policy: |- + path "pki/issue/*" { + capabilities = ["read", "update"] + } + +- name: Create a test non-root token + vault_ci_token_create: + policies: test-pki-policy + register: user_token_cmd + +- name: 'Write Canary' + vault_ci_write: + path: '{{ vault_configure_engine_pki_canary.path }}' + data: + value: '{{ vault_configure_engine_pki_canary.value }}' diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/tasks/main.yml new file mode 100644 index 000000000..3753480e8 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/tasks/main.yml @@ -0,0 +1,17 @@ +--- +- name: Configuration tasks + module_defaults: + vault_ci_enable_engine: '{{ vault_plugins_module_defaults_common }}' + vault_ci_read: '{{ vault_plugins_module_defaults_common }}' + vault_ci_write: '{{ vault_plugins_module_defaults_common }}' + vault_ci_policy_put: '{{ vault_plugins_module_defaults_common }}' + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + block: + - name: Canary for Vault PKI engine setup + vault_ci_read: + path: '{{ vault_configure_engine_pki_canary.path }}' + register: canary + + - name: Configure Vault PKI engine basic setup + include_tasks: configure.yml + when: 
canary.result is none
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/vars/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/vars/main.yml
new file mode 100644
index 000000000..70d06cf10
--- /dev/null
+++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_configure_engine_pki/vars/main.yml
@@ -0,0 +1,4 @@
+---
+vault_configure_engine_pki_canary:
+  path: cubbyhole/configure_engine_pki
+  value: complete # value does not matter
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_server_cert/README.md b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_server_cert/README.md
new file mode 100644
index 000000000..1d12c29d1
--- /dev/null
+++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_server_cert/README.md
@@ -0,0 +1,5 @@
+# `setup_vault_server_cert`
+Generates a key and self-signed certificate for the Vault server.
+
+## Notes
+* Requires the [`community.crypto` collection](https://galaxy.ansible.com/community/crypto).
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_server_cert/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_server_cert/aliases
new file mode 100644
index 000000000..136c05e0d
--- /dev/null
+++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_server_cert/aliases
@@ -0,0 +1 @@
+hidden
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_server_cert/tasks/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_server_cert/tasks/main.yml
new file mode 100644
index 000000000..a2997bed5
--- /dev/null
+++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_server_cert/tasks/main.yml
@@ -0,0 +1,28 @@
+---
+- name: "Cert generation tasks"
+  vars:
+    vault_csr_file: '{{ vault_key_file | dirname }}/csr.csr'
+  block:
+    - name: Generate privatekey
+      community.crypto.openssl_privatekey:
+        mode: 'o=r'
+        path: '{{ vault_key_file }}'
+
+    - name: Generate CSR
+      vars:
+        vault_dns_names: '{{ [vault_hostname] + (vault_alternate_hostnames | default([])) }}'
+      community.crypto.openssl_csr:
+        mode: 'o=r'
+        path: '{{ vault_csr_file }}'
+        privatekey_path: '{{ vault_key_file }}'
+        subject_alt_name: "{{ vault_dns_names | map('regex_replace', '^', 'DNS:') | list }}"
+
+    - name: Generate selfsigned certificate
+      community.crypto.x509_certificate:
+        mode: 'o=r'
+        path: '{{ vault_cert_file }}'
+        csr_path: '{{ vault_csr_file }}'
+        privatekey_path: '{{ vault_key_file }}'
+        provider: selfsigned
+        selfsigned_digest: sha256
+      register: selfsigned_certificate
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/README.md b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/README.md
new file mode 100644
index 000000000..50fc9cbe2
--- /dev/null
+++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/README.md
@@ -0,0 +1,2 @@
+# `setup_vault_test_plugins`
+Contains plugins/modules that are used only in testing.
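The `vault_ci_*` modules added below are intentionally thin wrappers around `hvac` calls, so the Vault state they seed can be reproduced by hand. A rough, hypothetical equivalent is sketched here; the URL, token, mount name, policy, and secret values are illustrative assumptions rather than fixtures used by the tests.

```python
# Hand-run approximation of what vault_ci_enable_engine, vault_ci_policy_put,
# and vault_ci_kv_put do while seeding a test Vault. All values are examples.
import hvac

client = hvac.Client(url="http://localhost:8200", token="root-token")

# vault_ci_enable_engine: enable a KV v2 engine at a custom mount point
client.sys.enable_secrets_engine(backend_type="kv", path="kv2", options={"version": "2"})

# vault_ci_policy_put: install a named policy from an HCL string
client.sys.create_or_update_policy(
    name="example-read-policy",
    policy='path "kv2/data/*" { capabilities = ["read"] }',
)

# vault_ci_kv_put: write a secret for the integration tests to read back
client.secrets.kv.v2.create_or_update_secret(
    path="secret1", secret={"key1": "value1"}, mount_point="kv2"
)
```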
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/aliases b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/aliases new file mode 100644 index 000000000..136c05e0d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/aliases @@ -0,0 +1 @@ +hidden diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_enable_auth.py b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_enable_auth.py new file mode 100644 index 000000000..2e6b7313b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_enable_auth.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.module_utils.basic import AnsibleModule +import hvac +import re + + +def main(): + # corresponds to https://hvac.readthedocs.io/en/stable/usage/system_backend/auth.html#enable-auth-method + module = AnsibleModule( + argument_spec=dict( + url=dict(type='str', required=True), + token=dict(type='str', required=True), + method_type=dict(type='str', required=True), + path=dict(type='str'), + config=dict(type='dict'), + kwargs=dict(type='dict'), + ), + ) + + p = module.params + + client = hvac.Client(url=p['url'], token=p['token']) + + try: + client.sys.enable_auth_method( + method_type=p['method_type'], + path=p['path'], + config=p['config'], + kwargs=p['kwargs'], + ) + + except hvac.exceptions.InvalidRequest as e: + if not str(e).startswith('path is already in use'): + raise + + path = re.sub(r'^path is already in use at ([^/]+)/.*?$', r'\1', str(e)) + + methods = client.sys.list_auth_methods()['data'] + if p['path'] and p['path'] != path: + raise + + this_method = methods[path + '/'] + if this_method['type'] != p['method_type']: + raise + + module.warn("path in use ('%s'); retuning." 
% str(e)) + + client.sys.tune_auth_method( + path=path, + config=p['config'], + kwargs=p['kwargs'], + ) + + module.exit_json(changed=True) + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_enable_engine.py b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_enable_engine.py new file mode 100644 index 000000000..5176c904c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_enable_engine.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.module_utils.basic import AnsibleModule +import hvac + + +def main(): + # corresponds to https://hvac.readthedocs.io/en/stable/usage/system_backend/mount.html#enable-secrets-engine + module = AnsibleModule( + argument_spec=dict( + url=dict(type='str', required=True), + token=dict(type='str', required=True), + backend_type=dict(type='str', required=True), + path=dict(type='str'), + config=dict(type='dict'), + options=dict(type='dict'), + kwargs=dict(type='dict'), + ), + ) + + p = module.params + + client = hvac.Client(url=p['url'], token=p['token']) + + try: + client.sys.enable_secrets_engine( + backend_type=p['backend_type'], + path=p['path'], + config=p['config'], + options=p['options'], + kwargs=p['kwargs'], + ) + + except hvac.exceptions.InvalidRequest as e: + if not str(e).startswith('path is already in use'): + raise + + p['path'] = p['path'] or p['backend_type'] + engines = client.sys.list_mounted_secrets_engines()['data'] + this_engine = engines[p['path'].strip('/') + '/'] + if this_engine['type'] != p['backend_type']: + raise + + module.warn("path '%s' of type '%s' already exists; retuning." 
% (p['path'], this_engine['type'])) + + client.sys.tune_mount_configuration( + path=p['path'], + config=p['config'], + options=p['options'], + kwargs=p['kwargs'], + ) + + module.exit_json(changed=True) + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_kv2_destroy_all.py b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_kv2_destroy_all.py new file mode 100644 index 000000000..810dbe4c6 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_kv2_destroy_all.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.module_utils.basic import AnsibleModule +import hvac + + +def main(): + module = AnsibleModule( + argument_spec=dict( + url=dict(type='str', required=True), + token=dict(type='str', required=True), + path=dict(type='str'), + mount_point=dict(type='str'), + ), + ) + + p = module.params + + client = hvac.Client(url=p['url'], token=p['token']) + + extra = {} + if p['mount_point'] is not None: + extra['mount_point'] = p['mount_point'] + + client.secrets.kv.v2.delete_metadata_and_all_versions( + path=p['path'], + **extra + ) + + module.exit_json(changed=True) + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_kv2_metadata_read.py b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_kv2_metadata_read.py new file mode 100644 index 000000000..0532124ed --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_kv2_metadata_read.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Isaac Wagner (@idwagner) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +import traceback + +from ansible.module_utils.basic import AnsibleModule +import hvac + + +def main(): + module = AnsibleModule( + argument_spec=dict( + url=dict(type='str', required=True), + token=dict(type='str', required=True), + path=dict(type='str'), + mount_point=dict(type='str'), + ), + ) + + p = module.params + + client = hvac.Client(url=p['url'], token=p['token']) + + extra = {} + if p['mount_point'] is not None: + extra['mount_point'] = p['mount_point'] + + try: + result = client.secrets.kv.v2.read_secret_metadata(path=p['path'], **extra) + except Exception as e: + module.fail_json(msg=str(e), exception=traceback.format_exc()) + + module.exit_json(changed=True, result=result) + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_kv_put.py b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_kv_put.py new file mode 100644 index 000000000..6d3318140 --- /dev/null +++ 
b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_kv_put.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.module_utils.basic import AnsibleModule +import hvac + + +def main(): + module = AnsibleModule( + argument_spec=dict( + url=dict(type='str', required=True), + token=dict(type='str', required=True), + path=dict(type='str'), + mount_point=dict(type='str'), + secret=dict(type='dict', required=True), + version=dict(type='int', default=2) + ), + ) + + p = module.params + + client = hvac.Client(url=p['url'], token=p['token']) + + client.secrets.kv.default_kv_version = p['version'] + + extra = {} + if p['mount_point'] is not None: + extra['mount_point'] = p['mount_point'] + + client.secrets.kv.create_or_update_secret( + path=p['path'], + secret=p['secret'], + **extra + ) + + module.exit_json(changed=True) + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_policy_put.py b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_policy_put.py new file mode 100644 index 000000000..d5e096a13 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_policy_put.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.module_utils.basic import AnsibleModule +import hvac + + +def main(): + module = AnsibleModule( + argument_spec=dict( + url=dict(type='str', required=True), + token=dict(type='str', required=True), + name=dict(type='str', required=True), + policy=dict(type='raw', required=True), + ), + ) + + p = module.params + + client = hvac.Client(url=p['url'], token=p['token']) + + client.sys.create_or_update_policy( + name=p['name'], + policy=p['policy'], + ) + + module.exit_json(changed=True) + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_read.py b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_read.py new file mode 100644 index 000000000..8e0e52bd3 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_read.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +import traceback + +from ansible.module_utils.basic import AnsibleModule +import hvac + + +def main(): + module = AnsibleModule( + argument_spec=dict( + url=dict(type='str', required=True), + token=dict(type='str', required=True), + 
path=dict(type='str', required=True), + ), + ) + + p = module.params + + client = hvac.Client(url=p['url'], token=p['token']) + + try: + result = client.read(path=p['path']) + except Exception as e: + module.fail_json(msg=str(e), exception=traceback.format_exc()) + + module.exit_json(changed=True, result=result) + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_token_create.py b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_token_create.py new file mode 100644 index 000000000..10c9432bc --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_token_create.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.module_utils.basic import AnsibleModule +import hvac + + +def main(): + module = AnsibleModule( + argument_spec=dict( + url=dict(type='str', required=True), + token=dict(type='str', required=True), + no_default_policy=dict(type='bool', default=False), + policies=dict(type='list'), + ttl=dict(type=str, default='1h'), + ), + ) + + p = module.params + + client = hvac.Client(url=p['url'], token=p['token']) + + result = client.auth.token.create( + policies=p['policies'], + no_default_policy=p.get('no_default_policy'), + ttl=p.get('ttl'), + ) + + module.exit_json(changed=True, result=result) + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_write.py b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_write.py new file mode 100644 index 000000000..1e7da87fd --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_ci_write.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) +# SPDX-License-Identifier: BSD-2-Clause + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.module_utils.basic import AnsibleModule +import hvac +import json + + +def main(): + module = AnsibleModule( + argument_spec=dict( + url=dict(type='str', required=True), + token=dict(type='str', required=True), + path=dict(type='str', required=True), + data=dict(type='dict', required=True), + ), + ) + + p = module.params + + client = hvac.Client(url=p['url'], token=p['token']) + + result = client.write(path=p['path'], **p['data']) + + dictified = json.loads( + json.dumps( + result, + skipkeys=True, + default=lambda o: getattr(o, '__dict__', str(o)), + ) + ) + + module.exit_json(changed=True, result=dictified) + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_test_auth.py b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_test_auth.py new file mode 100644 index 000000000..0c1bc6fcb --- /dev/null 
+++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_test_auth.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# (c) 2020, Brian Scholer (@briantist)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+DOCUMENTATION = """
+  module: vault_test_auth
+  author:
+    - Brian Scholer (@briantist)
+  short_description: A module for testing centralized auth methods
+  description: Test auth methods by performing a login to Vault and returning token information.
+  extends_documentation_fragment:
+    - community.hashi_vault.connection
+    - community.hashi_vault.auth
+  options:
+    want_exception:
+      type: bool
+      default: False
+"""
+
+import json
+
+from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_module import HashiVaultModule
+
+
+def dictify(thing):
+    return json.loads(
+        json.dumps(
+            thing,
+            skipkeys=True,
+            default=lambda o: getattr(o, '__dict__', str(o)),
+        )
+    )
+
+
+# this module is for running tests only; no_log can interfere with return values
+# and/or make it harder to troubleshoot test failures.
+def strip_no_log(spec):
+    for key in list(spec.keys()):
+        if 'no_log' in spec[key]:
+            spec[key]['no_log'] = False
+
+
+def run_module():
+    argspec = HashiVaultModule.generate_argspec(
+        want_exception=dict(type='bool'),
+    )
+
+    strip_no_log(argspec)
+
+    module = HashiVaultModule(
+        argument_spec=argspec,
+        supports_check_mode=False
+    )
+
+    options = module.adapter
+
+    module.connection_options.process_connection_options()
+    client_args = module.connection_options.get_hvac_connection_options()
+    client = module.helper.get_vault_client(**client_args)
+
+    err = msg = response = None
+    try:
+        try:
+            module.authenticator.validate()
+            response = module.authenticator.authenticate(client)
+        except NotImplementedError as e:
+            module.fail_json(msg=str(e), exception=e)
+    except Exception as e:
+        msg = str(e)
+        if options.get_option('want_exception'):
+            err = dictify(e)
+        else:
+            module.fail_json(msg=msg, exception=e)
+
+    rob = {
+        'login': response,
+        'failed': False,
+        'inner': {'failed': False}
+    }
+
+    if err is not None:
+        rob['inner']['failed'] = True
+        rob['exception'] = err
+        rob['msg'] = msg
+
+    module.exit_json(**rob)
+
+
+def main():
+    run_module()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_test_connection.py b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_test_connection.py
new file mode 100644
index 000000000..00477c874
--- /dev/null
+++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/library/vault_test_connection.py
@@ -0,0 +1,114 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# (c) 2020, Brian Scholer (@briantist)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+DOCUMENTATION = """
+  module: vault_test_connection
+  author:
+    - Brian Scholer (@briantist)
+  short_description: A module for testing connection to Vault
+  description: Test connection to Vault and return
useful information. + extends_documentation_fragment: + - community.hashi_vault.connection + options: + want_client: + type: bool + default: False + want_args: + type: bool + default: False + want_exception: + type: bool + default: False +""" +import json + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_module import HashiVaultModule + + +def dictify(thing): + return json.loads( + json.dumps( + thing, + skipkeys=True, + default=lambda o: getattr(o, '__dict__', str(o)), + ) + ) + + +def run_module(): + this = dict(_retry_count=0) + + argspec = HashiVaultModule.generate_argspec( + want_client=dict(type='bool'), + want_args=dict(type='bool'), + want_exception=dict(type='bool'), + ) + + def _generate_retry_callback(retry_action): + '''returns a Retry callback function for plugins''' + throwaway = HashiVaultModule(argument_spec=argspec) + original = throwaway._generate_retry_callback(retry_action) + + def _on_retry(retry_obj): + if retry_obj.total > 0: + this['_retry_count'] += 1 + + original(retry_obj) + + return _on_retry + + module = HashiVaultModule( + hashi_vault_custom_retry_callback=_generate_retry_callback, + argument_spec=argspec, + supports_check_mode=False + ) + + options = module.adapter + err = status = msg = None + + module.connection_options.process_connection_options() + client_args = module.connection_options.get_hvac_connection_options() + client = module.helper.get_vault_client(**client_args) + + try: + status = client.sys.read_health_status(method='GET') + except Exception as e: + if options.get_option_default('want_exception'): + err = dictify(e) + msg = str(e) + else: + raise + + rob = { + 'retries': this['_retry_count'], + 'status': status, + 'failed': False, + 'inner': {'failed': False} + } + + if options.get_option_default('want_client'): + rob['client'] = dictify(client) + + if options.get_option_default('want_args'): + rob['args'] = dictify(client_args) + + if err is not None: + rob['inner']['failed'] = True + rob['exception'] = err + rob['msg'] = msg + + module.exit_json(**rob) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/lookup_plugins/vault_test_auth.py b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/lookup_plugins/vault_test_auth.py new file mode 100644 index 000000000..07d867428 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/lookup_plugins/vault_test_auth.py @@ -0,0 +1,85 @@ +# (c) 2020, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + name: vault_test_auth + author: + - Brian Scholer (@briantist) + short_description: A plugin for testing centralized auth methods + description: Test auth methods by performing a login to Vault and returning token information. 
+ extends_documentation_fragment: + - community.hashi_vault.connection + - community.hashi_vault.connection.plugins + - community.hashi_vault.auth + - community.hashi_vault.auth.plugins + options: + want_exception: + type: bool + default: False + vars: + - name: vault_test_auth_want_exception +""" +import json +from ansible.utils.display import Display +from ansible.errors import AnsibleError + +from ansible_collections.community.hashi_vault.plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase + +display = Display() + + +def dictify(thing): + return json.loads( + json.dumps( + thing, + skipkeys=True, + default=lambda o: getattr(o, '__dict__', str(o)), + ) + ) + + +class LookupModule(HashiVaultLookupBase): + def run(self, terms, variables=None, **kwargs): + options = self._options_adapter + err = response = msg = None + ret = [] + + if len(terms) != 0: + raise AnsibleError("Don't use a term string with this.") + + opts = kwargs.copy() + self.set_options(direct=opts, var_options=variables) + self.connection_options.process_connection_options() + client_args = self.connection_options.get_hvac_connection_options() + client = self.helper.get_vault_client(**client_args) + + try: + try: + self.authenticator.validate() + response = self.authenticator.authenticate(client) + except NotImplementedError as e: + raise AnsibleError(e) + except Exception as e: + if options.get_option('want_exception'): + err = dictify(e) + msg = str(e) + else: + raise + + rob = { + 'login': response, + 'failed': False, + } + + if err is not None: + rob['failed'] = True + rob['exception'] = err + rob['msg'] = msg + + ret.extend([rob]) + + return ret diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/lookup_plugins/vault_test_connection.py b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/lookup_plugins/vault_test_connection.py new file mode 100644 index 000000000..c5e000a6b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/lookup_plugins/vault_test_connection.py @@ -0,0 +1,108 @@ +# (c) 2020, Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + name: vault_test_connection + author: + - Brian Scholer (@briantist) + short_description: A plugin for testing connection to Vault + description: Test connection to Vault and return useful information. 
+ extends_documentation_fragment: + - community.hashi_vault.connection + - community.hashi_vault.connection.plugins + options: + want_client: + type: bool + default: False + vars: + - name: vault_test_connection_want_client + want_args: + type: bool + default: False + vars: + - name: vault_test_connection_want_args + want_exception: + type: bool + default: False + vars: + - name: vault_test_connection_want_exception +""" +import json +from ansible.utils.display import Display + +from ansible_collections.community.hashi_vault.plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase + +display = Display() + + +def dictify(thing): + return json.loads( + json.dumps( + thing, + skipkeys=True, + default=lambda o: getattr(o, '__dict__', str(o)), + ) + ) + + +class LookupModule(HashiVaultLookupBase): + _retry_count = 0 + + def _generate_retry_callback(self, retry_action): + '''returns a Retry callback function for plugins''' + + original = super(LookupModule, self)._generate_retry_callback(retry_action) + + def _on_retry(retry_obj): + if retry_obj.total > 0: + self._retry_count += 1 + + original(retry_obj) + + return _on_retry + + def run(self, terms, variables=None, **kwargs): + options = self._options_adapter + err = status = msg = None + ret = [] + + for term in terms: + opts = kwargs.copy() + self.set_options(direct=opts, var_options=variables) + self.connection_options.process_connection_options() + client_args = self.connection_options.get_hvac_connection_options() + client = self.helper.get_vault_client(**client_args) + + try: + status = client.sys.read_health_status(method='GET') + except Exception as e: + if options.get_option('want_exception'): + err = dictify(e) + msg = str(e) + else: + raise + + rob = { + 'retries': self._retry_count, + 'status': status, + 'failed': False, + } + + if options.get_option('want_client'): + rob['client'] = dictify(client) + + if options.get_option('want_args'): + rob['args'] = dictify(client_args) + + if err is not None: + rob['failed'] = True + rob['exception'] = err + rob['msg'] = msg + + ret.extend([rob]) + + return ret diff --git a/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/vars/main.yml b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/vars/main.yml new file mode 100644 index 000000000..3f891566e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/integration/targets/setup_vault_test_plugins/vars/main.yml @@ -0,0 +1,18 @@ +--- +vault_plugins_module_defaults_common: + url: '{{ vault_test_server_http | default(omit) }}' + token: '{{ vault_dev_root_token_id | default(omit) }}' + +# As of https://github.com/ansible/ansible/pull/74039 :: +# we can't use this dictionary directly in module_defaults anymore 😢 +# but keeping it here and updated as it's a handy copy/paste source +vault_plugins_module_defaults: + vault_ci_enable_auth: '{{ vault_plugins_module_defaults_common }}' + vault_ci_enable_engine: '{{ vault_plugins_module_defaults_common }}' + vault_ci_kv_put: '{{ vault_plugins_module_defaults_common }}' + vault_ci_kv2_destroy_all: '{{ vault_plugins_module_defaults_common }}' + vault_ci_kv2_metadata_read: '{{ vault_plugins_module_defaults_common }}' + vault_ci_policy_put: '{{ vault_plugins_module_defaults_common }}' + vault_ci_read: '{{ vault_plugins_module_defaults_common }}' + vault_ci_token_create: '{{ vault_plugins_module_defaults_common }}' + vault_ci_write: '{{ vault_plugins_module_defaults_common }}' diff --git 
a/ansible_collections/community/hashi_vault/tests/unit/compat/__init__.py b/ansible_collections/community/hashi_vault/tests/unit/compat/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/compat/__init__.py diff --git a/ansible_collections/community/hashi_vault/tests/unit/compat/builtins.py b/ansible_collections/community/hashi_vault/tests/unit/compat/builtins.py new file mode 100644 index 000000000..349d310e8 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/compat/builtins.py @@ -0,0 +1,33 @@ +# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +# +# Compat for python2.7 +# + +# One unittest needs to import builtins via __import__() so we need to have +# the string that represents it +try: + import __builtin__ # pylint: disable=unused-import +except ImportError: + BUILTINS = 'builtins' +else: + BUILTINS = '__builtin__' diff --git a/ansible_collections/community/hashi_vault/tests/unit/compat/mock.py b/ansible_collections/community/hashi_vault/tests/unit/compat/mock.py new file mode 100644 index 000000000..c98b26312 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/compat/mock.py @@ -0,0 +1,28 @@ +# Copyright (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +''' +Compat module for Python3.x's unittest.mock module +''' +# Python 2.7 + +# Note: Could use the pypi mock library on python3.x as well as python2.x. 
It +# is the same as the python3 stdlib mock library + +try: + # Allow wildcard import because we really do want to import all of mock's + # symbols into this compat shim + # pylint: disable=wildcard-import,unused-wildcard-import + from unittest.mock import * +except ImportError: + # Python 2 + # pylint: disable=wildcard-import,unused-wildcard-import + try: + from mock import * + except ImportError: + print('You need the mock library installed on python2.x to run tests') diff --git a/ansible_collections/community/hashi_vault/tests/unit/compat/unittest.py b/ansible_collections/community/hashi_vault/tests/unit/compat/unittest.py new file mode 100644 index 000000000..98f08ad6a --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/compat/unittest.py @@ -0,0 +1,38 @@ +# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +''' +Compat module for Python2.7's unittest module +''' + +import sys + +# Allow wildcard import because we really do want to import all of +# unittests's symbols into this compat shim +# pylint: disable=wildcard-import,unused-wildcard-import +if sys.version_info < (2, 7): + try: + # Need unittest2 on python2.6 + from unittest2 import * + except ImportError: + print('You need unittest2 installed on python2.6.x to run tests') +else: + from unittest import * diff --git a/ansible_collections/community/hashi_vault/tests/unit/conftest.py b/ansible_collections/community/hashi_vault/tests/unit/conftest.py new file mode 100644 index 000000000..862e93cf6 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/conftest.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import sys +import os +import json +import pytest + +from .compat import mock + +from ...plugins.module_utils._authenticator import HashiVaultAuthenticator + + +@pytest.fixture(autouse=True) +def skip_python(): + if sys.version_info < (3, 6): + pytest.skip('Skipping on Python %s. community.hashi_vault supports Python 3.6 and higher.' 
% sys.version) + + +@pytest.fixture +def fixture_loader(): + def _loader(name, parse='json'): + here = os.path.dirname(os.path.realpath(__file__)) + fixture = os.path.join(here, 'fixtures', name) + + if parse == 'path': + return fixture + + with open(fixture, 'r') as f: + if parse == 'json': + d = json.load(f) + elif parse == 'lines': + d = f.readlines() + elif parse == 'raw': + d = f.read() + else: + raise ValueError("Unknown value '%s' for parse" % parse) + + return d + + return _loader + + +@pytest.fixture +def vault_client(): + return mock.MagicMock() + + +@pytest.fixture +def authenticator(): + authenticator = HashiVaultAuthenticator + authenticator.validate = mock.Mock(wraps=lambda: True) + authenticator.authenticate = mock.Mock(wraps=lambda client: 'throwaway') + + return authenticator + + +@pytest.fixture +def patch_authenticator(authenticator): + with mock.patch('ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_module.HashiVaultAuthenticator', new=authenticator): + yield + + +@pytest.fixture +def patch_get_vault_client(vault_client): + with mock.patch( + 'ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common.HashiVaultHelper.get_vault_client', return_value=vault_client + ): + yield + + +@pytest.fixture +def requests_unparseable_response(): + r = mock.MagicMock() + r.json.side_effect = json.JSONDecodeError + + return r diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/approle_login_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/approle_login_response.json new file mode 100644 index 000000000..a9e4b2c83 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/approle_login_response.json @@ -0,0 +1,31 @@ +{ + "auth": { + "accessor": "zFP4VJtZFNGuzRsbYH8ham5E", + "client_token": "s.urjjEppAAXAOL2EWLCXgS4CY", + "entity_id": "fa3741ea-ad23-6557-9bc7-18a86dcaf3eb", + "lease_duration": 3600, + "metadata": { + "role_name": "req-secret-id-role" + }, + "orphan": true, + "policies": [ + "alt-policy", + "approle-policy", + "default" + ], + "renewable": true, + "token_policies": [ + "alt-policy", + "approle-policy", + "default" + ], + "token_type": "service" + }, + "data": null, + "lease_duration": 0, + "lease_id": "", + "renewable": false, + "request_id": "b35b7ff6-c1ce-f61d-deef-805ac3ae13dc", + "warnings": null, + "wrap_info": null +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/approle_secret_id_write_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/approle_secret_id_write_response.json new file mode 100644 index 000000000..f85bcbe01 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/approle_secret_id_write_response.json @@ -0,0 +1,14 @@ +{ + "auth": null, + "data": { + "secret_id": "41b12758-8c6f-0896-c761-92e05675023c", + "secret_id_accessor": "b0ab25c8-a8eb-3b31-3830-663840d5f504", + "secret_id_ttl": 3600 + }, + "lease_duration": 0, + "lease_id": "", + "renewable": false, + "request_id": "5e461200-18f2-0f18-4601-6bf2b9368cb5", + "warnings": null, + "wrap_info": null +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/aws_iam_login_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/aws_iam_login_response.json new file mode 100644 index 000000000..c02639e4b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/aws_iam_login_response.json @@ -0,0 +1,30 @@ +{ + "request_id": 
"ec0d300f-ac44-4f5b-9feb-282d3a6686a7", + "lease_id": "", + "lease_duration": 0, + "renewable": false, + "data": null, + "warnings": null, + "auth": { + "client_token": "s.YXZDqrOgv3mhlcPXpRBrS2cE", + "accessor": "Xkad5E1bHRBJApR03pGrp1a0", + "policies": [ + "default", + "aws-sample-policy" + ], + "token_policies": [ + "default", + "aws-sample-policy" + ], + "identity_policies": null, + "metadata": { + "account_id": "064281349855", + "auth_type": "iam", + "role_id": "b9462e71-e600-418d-b14e-fa69627470ec" + }, + "orphan": true, + "entity_id": "e23d3bad-7485-4330-bf74-d64fc1e774e4", + "lease_duration": 1800, + "renewable": true + } +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/azure_login_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/azure_login_response.json new file mode 100644 index 000000000..f1d1302e5 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/azure_login_response.json @@ -0,0 +1,33 @@ +{ + "request_id": "cbfb16b9-4cf6-917d-182b-170801fc5a4e", + "lease_id": "", + "renewable": false, + "lease_duration": 0, + "data": null, + "wrap_info": null, + "warnings": null, + "auth": { + "client_token": "hvs.CAESIH6iy4yyvKMpk-vcaaVvU8nGfZFRCcH92hVa24lGNxHNGh4KHGh2cy5qU29Ua1FscTJIQ3BBY1AwTDM4dzNpR0E", + "accessor": "60U0DvUOIMOIGI7kzAneeD2x", + "policies": [ + "default", + "azure-sample-policy" + ], + "token_policies": [ + "default", + "azure-sample-policy" + ], + "metadata": { + "resource_group_name": "", + "role": "msi-vault", + "subscription_id": "" + }, + "lease_duration": 2764800, + "renewable": true, + "entity_id": "ff6a9d66-c2eb-6b78-e463-b3192243b5c1", + "token_type": "service", + "orphan": true, + "mfa_requirement": null, + "num_uses": 0 + } +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/cert_login_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/cert_login_response.json new file mode 100644 index 000000000..20096fd45 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/cert_login_response.json @@ -0,0 +1,35 @@ +{ + "auth": { + "accessor": "f69aXFTLzAE1e5pPDpAqNAFW", + "client_token": "s.bJ8UmS3NbYH3XJD7P70Yiyml", + "entity_id": "84590d6b-54a2-7d81-201c-6107353169fb", + "lease_duration": 3600, + "metadata": { + "authority_key_id": "66:45:2e:ae:d1:39:c8:d8:0d:fd:e7:d8:0f:8a:49:ee:f7:cc:53:ae", + "cert_name": "vault_test", + "common_name": "vault-test", + "serial_number": "657513290402968240784573665154053221879835701422", + "subject_key_id": "66:45:2e:ae:d1:39:c8:d8:0d:fd:e7:d8:0f:8a:49:ee:f7:cc:53:ae" + }, + "orphan": true, + "policies": [ + "approle-policy", + "default", + "test-policy" + ], + "renewable": true, + "token_policies": [ + "approle-policy", + "default", + "test-policy" + ], + "token_type": "service" + }, + "data": null, + "lease_duration": 0, + "lease_id": "", + "renewable": false, + "request_id": "9016334e-8bbb-4390-5512-c9b526b39bd3", + "warnings": null, + "wrap_info": null +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/jwt_login_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/jwt_login_response.json new file mode 100644 index 000000000..c67ba4914 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/jwt_login_response.json @@ -0,0 +1,29 @@ +{ + "auth": { + "accessor": "3QbZdd50wZFaUqBVb6v6vXhG", + "client_token": "s.8PtJkzREM9ZIYWQ28cSGqtP6", + "entity_id": "b708d9c6-38fa-2f45-0cfd-1f36c11f3acb", + 
"lease_duration": 3600, + "metadata": { + "role": "test-role" + }, + "orphan": true, + "policies": [ + "default", + "test-policy" + ], + "renewable": true, + "token_policies": [ + "default", + "test-policy" + ], + "token_type": "service" + }, + "data": null, + "lease_duration": 0, + "lease_id": "", + "renewable": false, + "request_id": "797bbe1d-4a95-c078-ecd2-2eff4c4fdaed", + "warnings": null, + "wrap_info": null +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/kv1_get_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/kv1_get_response.json new file mode 100644 index 000000000..2f850d642 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/kv1_get_response.json @@ -0,0 +1,13 @@ +{ + "auth": null, + "data": { + "Key1": "val1", + "Key2": "val2" + }, + "lease_duration": 2764800, + "lease_id": "", + "renewable": false, + "request_id": "e26a7521-e512-82f1-3998-7cc494f14e86", + "warnings": null, + "wrap_info": null +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/kv2_get_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/kv2_get_response.json new file mode 100644 index 000000000..deb872387 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/kv2_get_response.json @@ -0,0 +1,22 @@ +{ + "auth": null, + "data": { + "data": { + "Key1": "val1", + "Key2": "val2" + }, + "metadata": { + "created_time": "2022-04-21T15:56:58.8525402Z", + "custom_metadata": null, + "deletion_time": "", + "destroyed": false, + "version": 2 + } + }, + "lease_duration": 0, + "lease_id": "", + "renewable": false, + "request_id": "15538d55-0ad9-1c39-2f4b-dcbb982f13cc", + "warnings": null, + "wrap_info": null +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/kv2_list_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/kv2_list_response.json new file mode 100644 index 000000000..2fe833b51 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/kv2_list_response.json @@ -0,0 +1,15 @@ +{ + "auth": null, + "data": { + "keys": [ + "Secret1", + "Secret2" + ] + }, + "lease_duration": 0, + "lease_id": "", + "renewable": false, + "request_id": "02e4b52a-23b1-9a1c-cf2b-3799edb17fed", + "warnings": null, + "wrap_info": null +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/ldap_login_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/ldap_login_response.json new file mode 100644 index 000000000..474a6bd0c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/ldap_login_response.json @@ -0,0 +1,28 @@ +{ + "request_id": "30fd9f34-83af-4921-be0c-b93e41dc3959", + "lease_id": "", + "lease_duration": 0, + "renewable": false, + "data": {}, + "warnings": null, + "auth": { + "client_token": "s.fjXSOvsGY3Q95XGyJKnDw7OC", + "accessor": "VnnNWBasAnVn1YO4cVL9jJei", + "policies": [ + "default", + "test-policy" + ], + "token_policies": [ + "default", + "test-policy" + ], + "identity_policies": null, + "metadata": { + "username": "ldapuser" + }, + "orphan": true, + "entity_id": "08e5b262-7dc2-4edd-8fc7-77882ca7cc1b", + "lease_duration": 3600, + "renewable": true + } +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/lookup-self_with_meta.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/lookup-self_with_meta.json new file mode 100644 index 000000000..6936d2b9b --- /dev/null +++ 
b/ansible_collections/community/hashi_vault/tests/unit/fixtures/lookup-self_with_meta.json @@ -0,0 +1,24 @@ +{ + "auth": null, + "data": { + "accessor": "8609694a-cdbc-db9b-d345-e782dbb562ed", + "creation_time": 1523979354, + "creation_ttl": 2764800, + "display_name": "ldap2-tesla", + "entity_id": "7d2e3179-f69b-450c-7179-ac8ee8bd8ca9", + "expire_time": "2018-05-19T11:35:54.466476215-04:00", + "explicit_max_ttl": 0, + "id": "cf64a70f-3a12-3f6c-791d-6cef6d390eed", + "identity_policies": ["dev-group-policy"], + "issue_time": "2018-04-17T11:35:54.466476078-04:00", + "meta": { + "username": "tesla" + }, + "num_uses": 0, + "orphan": true, + "path": "auth/ldap2/login/tesla", + "policies": ["default", "testgroup2-policy"], + "renewable": true, + "ttl": 2764790 + } +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/lookup-self_without_meta.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/lookup-self_without_meta.json new file mode 100644 index 000000000..eaf006c25 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/lookup-self_without_meta.json @@ -0,0 +1,21 @@ +{ + "auth": null, + "data": { + "accessor": "8609694a-cdbc-db9b-d345-e782dbb562ed", + "creation_time": 1523979354, + "creation_ttl": 2764800, + "display_name": "ldap2-tesla", + "entity_id": "7d2e3179-f69b-450c-7179-ac8ee8bd8ca9", + "expire_time": "2018-05-19T11:35:54.466476215-04:00", + "explicit_max_ttl": 0, + "id": "cf64a70f-3a12-3f6c-791d-6cef6d390eed", + "identity_policies": ["dev-group-policy"], + "issue_time": "2018-04-17T11:35:54.466476078-04:00", + "num_uses": 0, + "orphan": true, + "path": "auth/ldap2/login/tesla", + "policies": ["default", "testgroup2-policy"], + "renewable": true, + "ttl": 2764790 + } +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/pki_generate_certificate_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/pki_generate_certificate_response.json new file mode 100644 index 000000000..39de2e604 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/pki_generate_certificate_response.json @@ -0,0 +1,17 @@ +{ + "auth": null, + "data": { + "certificate": "-----BEGIN 
CERTIFICATE-----\nMIID/jCCAuagAwIBAgIUGlFiKFaKT3YFut6MIAEBjxdERtUwDQYJKoZIhvcNAQEL\nBQAwGTEXMBUGA1UEAxMOY2EuZXhhbXBsZS5vcmcwHhcNMjIwMjEzMTgwNzU2WhcN\nMjIwMjE0MTgwODI2WjAhMR8wHQYDVQQDExZkdW1teS50ZXN0LmV4YW1wbGUub3Jn\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxVgdHP1hWPSisopSyAG5\nX6PzH0pvOd5DU1HNFwE6OELzj3LlWnvMhoPNI5YAjLX+R0z461YfuWmWDwRvwMXu\nb3ErZWcB04+9iZ2zIcpq2Bc3GzVWWRl3uB8pNDYN2EWwgc14z71yxD4A0mVBR9GG\nloP0ntLSfKAccdsEQ8Pd5WJKLN6QcaQ6nO2oc4qJT6F19c27nElpuq0Xd0j5hZg9\nfi+SMA/PZ+p6Ego46Wm5gGkD/AzIQ0ElSnQrw0dLc0t6ktis0Ln3IqW4SbooWqLU\nE5+30T/fLlnIoJpmjQqj4Gh96wKpOuQ+9vMA+0ODuFfMrV5cHCxwoPteJ/EXUSzr\n2wIDAQABo4IBNDCCATAwDgYDVR0PAQH/BAQDAgOoMB0GA1UdJQQWMBQGCCsGAQUF\nBwMBBggrBgEFBQcDAjAdBgNVHQ4EFgQUsUQZM+FDLd6A4AzMJ9l/FITktF8wHwYD\nVR0jBBgwFoAU2MqIFsUSq2DSn6Bps5AKsVRven8wOQYIKwYBBQUHAQEELTArMCkG\nCCsGAQUFBzAChh1odHRwOi8vbXl2YXVsdDo4MjAwL3YxL3BraS9jYTBTBgNVHREE\nTDBKghZkdW1teS50ZXN0LmV4YW1wbGUub3JnghdkdW1teTIudGVzdC5leGFtcGxl\nLm9yZ4IXZHVtbXkzLnRlc3QuZXhhbXBsZS5vcmcwLwYDVR0fBCgwJjAkoCKgIIYe\naHR0cDovL215dmF1bHQ6ODIwMC92MS9wa2kvY3JsMA0GCSqGSIb3DQEBCwUAA4IB\nAQB68sBRsYnAlZqwypVoJlvRqtqwvgdQF9tgTol+fHrKDFSeFDJbhQZY9QRI+juZ\n1CAWClCK5O4f0PGfozaDfn+Iph6wuc+H49MY3Z/wgwSvg2sQYOvUP6HZMk0XajDQ\ntJP1F/yPQrZ7e7WVQy9SdvLd/QwGjwCyRFvK2DS5IzImUzTreycUK7Fr7Vy+Rlj0\n1O5JMMJen1z2G5lqdeW3dthMM+LH2o7gSgms9RLd66Y/p+eCyXhPxI9TlJx84kqw\nu8MPJoEz9x2oX2bxuTLw6pmV7W7zH9YB5pZm2q9k5sDyFX4khvUTmBuOTQdcYO4W\nsgvZp3hDe9Hjh6WrQrxVUNfO\n-----END CERTIFICATE-----", + "expiration": 1644862106, + "issuing_ca": "-----BEGIN CERTIFICATE-----\nMIIDPjCCAiagAwIBAgIUD2KlA6b1Dgd0db97iymVMC8kG64wDQYJKoZIhvcNAQEL\nBQAwGTEXMBUGA1UEAxMOY2EuZXhhbXBsZS5vcmcwHhcNMjIwMjEzMTgwNzI4WhcN\nMjIwMzE3MTgwNzU4WjAZMRcwFQYDVQQDEw5jYS5leGFtcGxlLm9yZzCCASIwDQYJ\nKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKYCpLqrJRbGglK2V0qHFZBd3tJIc3Qv\nMHDXA0SNXiEo+6RzTOqYAh72/7YeXGOTyD2f6wfdcwTuH1NYvfC+Oz1nDubdKn2F\nCdwKtuVAZ9BLSoxHH+NdntYaIVxv/7swpjHHFIxmA0ZN5auGWHfxe9kfeQc/ul3S\nF+bBQpt2syiLn0RheXUsf/2r/LcfZ93W4fD1G8yOU6xdsGpErwbXezfoaz2adDLQ\nXlj0rizpxR5zSKFBjCYMYVZJ06/22Pn1ePsWwGHrdJe1VuVab8k8uGVzvn4rNVOE\n3brfD6Jo3ua+xUIzrOmKQwmQ5GrUCRHvUW3IyecFyauinzopWtmFyGkCAwEAAaN+\nMHwwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNjK\niBbFEqtg0p+gabOQCrFUb3p/MB8GA1UdIwQYMBaAFNjKiBbFEqtg0p+gabOQCrFU\nb3p/MBkGA1UdEQQSMBCCDmNhLmV4YW1wbGUub3JnMA0GCSqGSIb3DQEBCwUAA4IB\nAQB1ItzmiOYJ70LaAdvzJN3VFexV4ZruM2QTgNbAHkIPgJQpGxoc3Y2+VaX0nWUR\nNWFLvw1vuM+rjjUe87X+mXoc6iPn6yZxmJ4QfaEkDgc+7jlr5T9STKgrnXQGIOF6\nO1pJiLfr+Rq5PpsdaQnN31DOpQIoeEVnf2c+2KmMIYt+E7JNRo+NS7lF3amgY6EZ\npOnbLLGQB+GM/EvPhDlMGrmFcW6Zk2GpyPMq1BtI/OJOcF1SB+R8ZOQ9ygHznt8f\nIs6d9xC0FlBLo901mp3NhrRI7nUJxdtFdZChgqjRYMB4kooQEC5RELbCUC3UvccO\ng9ne4DQhA/CVOOGYUpyHbOcP\n-----END CERTIFICATE-----", + "private_key": "-----BEGIN RSA PRIVATE 
KEY-----\nMIIEpQIBAAKCAQEAxVgdHP1hWPSisopSyAG5X6PzH0pvOd5DU1HNFwE6OELzj3Ll\nWnvMhoPNI5YAjLX+R0z461YfuWmWDwRvwMXub3ErZWcB04+9iZ2zIcpq2Bc3GzVW\nWRl3uB8pNDYN2EWwgc14z71yxD4A0mVBR9GGloP0ntLSfKAccdsEQ8Pd5WJKLN6Q\ncaQ6nO2oc4qJT6F19c27nElpuq0Xd0j5hZg9fi+SMA/PZ+p6Ego46Wm5gGkD/AzI\nQ0ElSnQrw0dLc0t6ktis0Ln3IqW4SbooWqLUE5+30T/fLlnIoJpmjQqj4Gh96wKp\nOuQ+9vMA+0ODuFfMrV5cHCxwoPteJ/EXUSzr2wIDAQABAoIBAQCiI/FXnj9bbTQ3\n6Tp2piP+lp/st6WHMDy0umL9Yb7J9whSdh5HJ6w1YRktAdPVyLnxLybdhNdv6Xan\nRAflpTpwSdVT7Tws7M7XwMArJTp/7SMTsdEOR8R7fO7HvRnG9gs9uupmFMu0vRTD\nyPnH3jjsdeKIk8LpLkvwp/hrDQTFr3aZnIcRz0pmZAAxMxFgmKN/ZR0S5PVjTdIU\nUACPr90vciixITY3u0C5hc5IPNKMLm8D/4E65J9D7Vjcs05bU+6ecfYYFnQtIhaH\ncv+t3nk5sfuGTy6ozBQ820V/YCN5wS840wSrqLFmUz8vdSFTqJUZbMoDYQjcPvxq\nh+0UajJRAoGBAMlfuk++vnGNVL4wTGb6GdUqmExdJ6nnfZ+uq19FgL8Q0zwhEKtQ\nTytkDsmPf+T2NfuEby8xow1hmxzlEUAq70vnvDq6VEqUtcJNVU3TggkLutWmslbY\nTj0mGFLHVoue6L+ElDubjKSklNCZcNXAmbb98dbuuM5mvLLtbJU2Zk1fAoGBAPrg\nixXBolvifJOiNWcJCuTT4K6j8RyKwx7PofGd6T1pIeooi/T4/dLiLB6y9zbwhrZq\nXh9H4kKBZqMp9K+7GVZMZxhYWKCCTpxvpmGrX8hINFm8Rz2KpKW6MvRaUKs2NYsj\npE5r51lJoF8EuQf47nhpufX/C3TTzzS3OKQWMjcFAoGAeUQmhGNPeD4t7CJVwCWY\nbOArusDWY+C9q+2Z0dOfBnBxZGJdEW1ZX73vkb3SvOTv+Tj1Y6w2jpZavHnNe6Df\nXgx9M7iFjiwjkJDVb/qQ8jWYG5U5DEdSRkyslRzpp0bYzoxeX876USOzYjMk2fQU\nHTir7EzyCYmg1PdZTjnmPW0CgYEA+n2Q4dxA3DW75TykzYf91JSpVjZi2/jA8dam\n/7SH2cVLE54AgEzMQu+I1e4jYDuwhhqWd+0yQO0rKecOZRgPKFeI6Intk/YHv7LL\nEeIm9LcDbkXLa+sukjrj/Y7f1NN/irm/qH2ctU4KTlVM2mT21kvaXYCWU8PYs+3t\nJAj1gnECgYEApvShoOEUOQyEgxOoCNr2+tesK7PiLcFTbeDkzc4eXgn8+nzazMMn\nBDr8siHE2GBetcw5e4NkNy5sY+vV09ruLEhLES+QvHB/9+5/HKkx5XXRk0Bkw8DJ\nJx7qr67MRYF1yP242H9Iqf/fbnZepw3lqb7OJkDib8+CXYthMkcffgs=\n-----END RSA PRIVATE KEY-----", + "private_key_type": "rsa", + "serial_number": "1a:51:62:28:56:8a:4f:76:05:ba:de:8c:20:01:01:8f:17:44:46:d5" + }, + "lease_duration": 0, + "lease_id": "", + "renewable": false, + "request_id": "6a17b0ab-3bfa-a2a4-4b7d-d23aad10e021", + "warnings": null, + "wrap_info": null +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/policy_list_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/policy_list_response.json new file mode 100644 index 000000000..5a7dfdb8d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/policy_list_response.json @@ -0,0 +1,15 @@ +{ + "auth": null, + "data": { + "keys": [ + "Policy1", + "Policy2" + ] + }, + "lease_duration": 0, + "lease_id": "", + "renewable": false, + "request_id": "96f2857e-5e33-1957-ea7e-be58f483faa3", + "warnings": null, + "wrap_info": null +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/token_create_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/token_create_response.json new file mode 100644 index 000000000..f0771bf0b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/token_create_response.json @@ -0,0 +1,27 @@ +{ + "auth": { + "accessor": "ag7UbiKYw1HNvkUlz0EAmJF1", + "client_token": "s.rlwajI2bblHAWU7uPqZhLru3", + "entity_id": "44133048-b0f9-c0b1-29dc-5d2e62f73b0c", + "lease_duration": 60, + "metadata": null, + "orphan": false, + "policies": [ + "test", + "default" + ], + "renewable": true, + "token_policies": [ + "test", + "default" + ], + "token_type": "service" + }, + "data": null, + "lease_duration": 0, + "lease_id": "", + "renewable": false, + "request_id": "61138ea3-a6ff-8735-102f-4e0087e1b3f4", + "warnings": null, + "wrap_info": null +} diff --git 
a/ansible_collections/community/hashi_vault/tests/unit/fixtures/userpass_list_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/userpass_list_response.json new file mode 100644 index 000000000..84cabf3bb --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/userpass_list_response.json @@ -0,0 +1,15 @@ +{ + "auth": null, + "data": { + "keys": [ + "User1", + "User2" + ] + }, + "lease_duration": 0, + "lease_id": "", + "renewable": false, + "request_id": "8b18a5ca-9baf-eb7c-18a6-11be81ed95a6", + "warnings": null, + "wrap_info": null +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/userpass_login_response.json b/ansible_collections/community/hashi_vault/tests/unit/fixtures/userpass_login_response.json new file mode 100644 index 000000000..66e7f1ced --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/userpass_login_response.json @@ -0,0 +1,31 @@ +{ + "auth": { + "accessor": "mQewzgKRx5Yui1h1eMemJlMu", + "client_token": "s.drgLxu6ZtttSVn5Zkoy0huMR", + "entity_id": "8a74ffd3-f71b-8ebe-7942-610428051ea9", + "lease_duration": 3600, + "metadata": { + "username": "testuser" + }, + "orphan": true, + "policies": [ + "alt-policy", + "default", + "userpass-policy" + ], + "renewable": true, + "token_policies": [ + "alt-policy", + "default", + "userpass-policy" + ], + "token_type": "service" + }, + "data": null, + "lease_duration": 0, + "lease_id": "", + "renewable": false, + "request_id": "511e8fba-83f0-4b7e-95ea-770aa19c1957", + "warnings": null, + "wrap_info": null +} diff --git a/ansible_collections/community/hashi_vault/tests/unit/fixtures/vault-token b/ansible_collections/community/hashi_vault/tests/unit/fixtures/vault-token new file mode 100644 index 000000000..e255ebf19 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/fixtures/vault-token @@ -0,0 +1 @@ +token-value diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/filter/test_filter_vault_login_token.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/filter/test_filter_vault_login_token.py new file mode 100644 index 000000000..0b1390778 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/filter/test_filter_vault_login_token.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible.errors import AnsibleError + +from ansible_collections.community.hashi_vault.plugins.filter.vault_login_token import vault_login_token + + +@pytest.fixture +def login_response(fixture_loader): + return fixture_loader('userpass_login_response.json') + + +@pytest.fixture +def module_login_response(login_response): + return { + "login": login_response + } + + +def test_vault_login_token_login_response(login_response): + token = vault_login_token(login_response) + + assert token == login_response['auth']['client_token'] + + +@pytest.mark.parametrize('optional_field', ['other', 'another']) +def test_vault_login_token_login_response_alternate_optionals(login_response, optional_field): + token = vault_login_token(login_response, optional_field=optional_field) + + assert token == login_response['auth']['client_token'] + + +def 
test_vault_login_token_module_login_response(module_login_response): + token = vault_login_token(module_login_response) + + assert token == module_login_response['login']['auth']['client_token'] + + +@pytest.mark.parametrize('optional_field', ['other', 'another']) +def test_vault_login_token_module_wrong_field(module_login_response, optional_field): + with pytest.raises(AnsibleError, match=r"Could not find 'auth' or 'auth\.client_token' fields\. Input may not be a Vault login response\."): + vault_login_token(module_login_response, optional_field=optional_field) + + +@pytest.mark.parametrize('input', [1, 'string', ['array'], ('tuple',), False]) +def test_vault_login_token_wrong_types(input): + with pytest.raises(AnsibleError, match=r"The 'vault_login_token' filter expects a dictionary\."): + vault_login_token(input) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/conftest.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/conftest.py new file mode 100644 index 000000000..ba4da9291 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/conftest.py @@ -0,0 +1,17 @@ +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + + +@pytest.fixture +def minimal_vars(): + return { + 'ansible_hashi_vault_auth_method': 'token', + 'ansible_hashi_vault_url': 'http://myvault', + 'ansible_hashi_vault_token': 'throwaway', + } diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_hashi_vault.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_hashi_vault.py new file mode 100644 index 000000000..aa2582a18 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_hashi_vault.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import pytest + +from ansible.plugins.loader import lookup_loader + +from ansible.module_utils.six.moves.urllib.parse import urlparse + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase + +from requests.exceptions import ConnectionError + + +@pytest.fixture +def hashi_vault_lookup_module(): + return lookup_loader.get('community.hashi_vault.hashi_vault') + + +class TestHashiVaultLookup(object): + + def test_is_hashi_vault_lookup_base(self, hashi_vault_lookup_module): + assert issubclass(type(hashi_vault_lookup_module), HashiVaultLookupBase) + + # TODO: this test should be decoupled from the hashi_vault lookup and moved to the connection options tests + @pytest.mark.parametrize( + 'envpatch,expected', + [ + ({'VAULT_ADDR': 'http://vault:0'}, 'http://vault:0'), + ({'ANSIBLE_HASHI_VAULT_ADDR': 'https://vaultalt'}, 'https://vaultalt'), + ({'VAULT_ADDR': 'https://vaultlow:8443', 'ANSIBLE_HASHI_VAULT_ADDR': 'http://vaulthigh:8200'}, 'http://vaulthigh:8200'), + ], + ) + def test_vault_addr_low_pref(self, 
hashi_vault_lookup_module, envpatch, expected): + url = urlparse(expected) + host = url.hostname + port = url.port if url.port is not None else {'http': 80, 'https': 443}[url.scheme] + + with mock.patch.dict(os.environ, envpatch): + with pytest.raises(ConnectionError) as e: + hashi_vault_lookup_module.run(['secret/fake'], token='fake') + + s_err = str(e.value) + + assert str(host) in s_err, "host '%s' not found in exception: %r" % (host, str(e.value)) + assert str(port) in s_err, "port '%i' not found in exception: %r" % (port, str(e.value)) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_ansible_settings.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_ansible_settings.py new file mode 100644 index 000000000..4d3318360 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_ansible_settings.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest +import re + +from ansible.plugins.lookup import LookupBase +from ansible.errors import AnsibleError + +from ...compat import mock + + +OPTIONS = { + '_terms': (None, 'default'), + '_private1': (1, 'default'), + '_private2': (2, 'env'), + '_private3': (None, 'variable'), + 'optionA': ('A', 'env'), + 'optionB': ('B', 'default'), + 'optionC': ('C', '/tmp/ansible.cfg'), + 'Doption': ('D', 'variable'), +} + + +@pytest.fixture +def sample_options(): + return OPTIONS.copy() + + +@pytest.fixture +def patch_config_manager(sample_options): + def _config_value_and_origin(name, *args, **kwargs): + return sample_options[name] + + from ansible.constants import config as C + config = mock.Mock(wraps=C) + config.get_configuration_definitions.return_value = sample_options.copy() + config.get_config_value_and_origin = mock.Mock(wraps=_config_value_and_origin) + + with mock.patch('ansible.constants.config', config): + yield config + + +@pytest.fixture +def vault_ansible_settings_lookup(loader): + return loader.get('community.hashi_vault.vault_ansible_settings') + + +@pytest.fixture(params=[False, True]) +def loader(request): + from ansible.plugins.loader import lookup_loader as orig_loader + + def _legacy_sim(plugin): + r = orig_loader.find_plugin_with_context(plugin) + return (r.plugin_resolved_name, None) + + loader = mock.Mock(wraps=orig_loader) + + if request.param: + loader.find_plugin_with_context.side_effect = AttributeError + loader.find_plugin_with_name = mock.Mock(wraps=_legacy_sim) + + with mock.patch('ansible.plugins.loader.lookup_loader', loader): + yield loader + + +class TestVaultAnsibleSettingsLookup(object): + + def test_vault_ansible_settings_is_lookup_base(self, vault_ansible_settings_lookup): + assert issubclass(type(vault_ansible_settings_lookup), LookupBase) + + @pytest.mark.parametrize('opt_plugin', ['community.hashi_vault.vault_login', 'vault_login'], ids=['plugin=fqcn', 'plugin=short']) + @pytest.mark.parametrize('opt_inc_none', [True, False], ids=lambda x: 'none=%s' % x) + @pytest.mark.parametrize('opt_inc_default', [True, False], ids=lambda x: 'default=%s' % x) + @pytest.mark.parametrize('opt_inc_private', [True, False], ids=lambda x: 'private=%s' % x) + @pytest.mark.parametrize('variables', [ + {}, + 
dict(ansible_hashi_vault_retries=7, ansible_hashi_vault_url='https://the.money.bin'), + ]) + @pytest.mark.parametrize(['terms', 'expected'], [ + ([], ['_terms', '_private1', '_private2', '_private3', 'optionA', 'optionB', 'optionC', 'Doption']), + (['*'], ['_terms', '_private1', '_private2', '_private3', 'optionA', 'optionB', 'optionC', 'Doption']), + (['opt*'], ['optionA', 'optionB', 'optionC']), + (['*', '!opt*'], ['_terms', '_private1', '_private2', '_private3', 'Doption']), + (['*', '!*opt*', 'option[B-C]'], ['_terms', '_private1', '_private2', '_private3', 'optionB', 'optionC']), + ]) + def test_vault_ansible_settings_stuff( + self, vault_ansible_settings_lookup, + opt_plugin, opt_inc_none, opt_inc_default, opt_inc_private, variables, terms, expected, + patch_config_manager, sample_options, loader + ): + kwargs = dict( + plugin=opt_plugin, + include_default=opt_inc_default, + include_none=opt_inc_none, + include_private=opt_inc_private + ) + + result = vault_ansible_settings_lookup.run(terms, variables, **kwargs) + + # this lookup always returns a single dictionary + assert isinstance(result, list) + assert len(result) == 1 + deresult = result[0] + assert isinstance(deresult, dict) + + patch_config_manager.get_configuration_definitions.assert_called_once() + + fqplugin = re.sub(r'^(?:community\.hashi_vault\.)?(.*?)$', r'community.hashi_vault.\1', opt_plugin) + if hasattr(loader, 'find_plugin_with_name'): + loader.find_plugin_with_name.assert_called_once_with(fqplugin) + else: + loader.find_plugin_with_context.assert_called_once_with(fqplugin) + + # the calls to get_config_value_and_origin vary, get the whole list of calls + cvocalls = patch_config_manager.get_config_value_and_origin.call_args_list + + for call in cvocalls: + # 1) ensure variables are always included in this call + # 2) ensure this method is only called for expected keys (after filtering) + margs, mkwargs = call + assert 'variables' in mkwargs and mkwargs['variables'] == variables, call + assert margs[0] in expected + + # go through all expected keys, ensuring they are in the result, + # or that they had a reason not to be. 
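# Illustrative note (not part of the committed file): with the sample OPTIONS defined
# at the top of this test module, include_default=False would skip '_terms',
# '_private1' and 'optionB' (origin 'default'), include_none=False would skip
# '_terms' and '_private3' (stored value None), and include_private=False would skip
# every key starting with '_'. The loop below checks exactly those skip conditions.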
+ for ex in expected: + skip_private = not opt_inc_private and ex.startswith('_') + skip_none = not opt_inc_none and sample_options[ex][0] is None + skip_default = not opt_inc_default and sample_options[ex][1] == 'default' + skip = skip_private or skip_none or skip_default + + assert ex in deresult or skip + + # ensure all expected keys (other than skipped private) had their values checked + if not skip_private: + assert any(call[0][0] == ex for call in cvocalls) + + # now check the results: + # 1) ensure private values are not present when they should not be + # 2) ensure None values are not present when they should not be + # 3) ensure values derived from defaults are not present when they should not be + # 4) ensure the value returned is the correct value + for k, v in deresult.items(): + assert opt_inc_private or not k.startswith('_') + assert opt_inc_none or v is not None + assert opt_inc_default or sample_options[k][1] != 'default' + assert v == sample_options[k][0] + + def test_vault_ansible_settings_plugin_not_found(self, vault_ansible_settings_lookup): + with pytest.raises(AnsibleError, match=r"'_ns._col._fake' plugin not found\."): + vault_ansible_settings_lookup.run([], plugin='_ns._col._fake') diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_kv1_get.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_kv1_get.py new file mode 100644 index 000000000..aa616d94d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_kv1_get.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import pytest + +from ansible.plugins.loader import lookup_loader +from ansible.errors import AnsibleError + +from ...compat import mock + +from .....plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + +from .....plugins.lookup import vault_kv1_get + + +hvac = pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 'patch_authenticator', + 'patch_get_vault_client', +) + + +@pytest.fixture +def vault_kv1_get_lookup(): + return lookup_loader.get('community.hashi_vault.vault_kv1_get') + + +@pytest.fixture +def kv1_get_response(fixture_loader): + return fixture_loader('kv1_get_response.json') + + +class TestVaultKv1GetLookup(object): + + def test_vault_kv1_get_is_lookup_base(self, vault_kv1_get_lookup): + assert issubclass(type(vault_kv1_get_lookup), HashiVaultLookupBase) + + def test_vault_kv1_get_no_hvac(self, vault_kv1_get_lookup, minimal_vars): + with mock.patch.object(vault_kv1_get, 'HVAC_IMPORT_ERROR', new=ImportError()): + with pytest.raises(AnsibleError, match=r"This plugin requires the 'hvac' Python library"): + vault_kv1_get_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_kv1_get_authentication_error(self, vault_kv1_get_lookup, minimal_vars, authenticator, exc): + authenticator.authenticate.side_effect = exc + + with pytest.raises(AnsibleError, match=r'throwaway msg'): + vault_kv1_get_lookup.run(terms='fake', variables=minimal_vars) + 
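# Reference sketch (not part of this diff): the auth-error tests above and below rely
# on the shared 'authenticator' fixture; when its authenticate()/validate() mocks
# raise HashiVaultValueError or NotImplementedError, the lookup is expected to
# surface the message wrapped in AnsibleError, roughly:
#
#     try:
#         self.authenticator.validate()
#         self.authenticator.authenticate(client)
#     except (NotImplementedError, HashiVaultValueError) as e:
#         raise AnsibleError(e)
#
# The plugin code shown here is an assumption for illustration; only the expected
# behaviour is taken from the assertions in these tests.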
+ @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_kv1_get_auth_validation_error(self, vault_kv1_get_lookup, minimal_vars, authenticator, exc): + authenticator.validate.side_effect = exc + + with pytest.raises(AnsibleError, match=r'throwaway msg'): + vault_kv1_get_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('paths', [['fake1'], ['fake2', 'fake3']]) + @pytest.mark.parametrize('engine_mount_point', ['kv', 'other']) + def test_vault_kv1_get_return_data(self, vault_kv1_get_lookup, minimal_vars, kv1_get_response, vault_client, paths, engine_mount_point): + client = vault_client + + expected_calls = [mock.call(path=p, mount_point=engine_mount_point) for p in paths] + + expected = {} + expected['raw'] = kv1_get_response.copy() + expected['metadata'] = kv1_get_response.copy() + expected['data'] = expected['metadata'].pop('data') + expected['secret'] = expected['data'] + + def _fake_kv1_get(path, mount_point): + r = kv1_get_response.copy() + r['data'] = r['data'].copy() + r['data'].update({'_path': path}) + r['data'].update({'_mount': mount_point}) + return r + + client.secrets.kv.v1.read_secret = mock.Mock(wraps=_fake_kv1_get) + + response = vault_kv1_get_lookup.run(terms=paths, variables=minimal_vars, engine_mount_point=engine_mount_point) + + client.secrets.kv.v1.read_secret.assert_has_calls(expected_calls) + + assert len(response) == len(paths), "%i paths processed but got %i responses" % (len(paths), len(response)) + + for p in paths: + r = response.pop(0) + ins_p = r['secret'].pop('_path') + ins_m = r['secret'].pop('_mount') + assert p == ins_p, "expected '_path=%s' field was not found in response, got %r" % (p, ins_p) + assert engine_mount_point == ins_m, "expected '_mount=%s' field was not found in response, got %r" % (engine_mount_point, ins_m) + assert r['raw'] == expected['raw'], ( + "remaining response did not match expected\nresponse: %r\nexpected: %r" % (r, expected['raw']) + ) + assert r['metadata'] == expected['metadata'] + assert r['data'] == expected['data'] + assert r['secret'] == expected['secret'] + + @pytest.mark.parametrize( + 'exc', + [ + (hvac.exceptions.Forbidden, "", r"^Forbidden: Permission Denied to path \['([^']+)'\]"), + ( + hvac.exceptions.InvalidPath, + "Invalid path for a versioned K/V secrets engine", + r"^Invalid path for a versioned K/V secrets engine \['[^']+'\]. 
If this is a KV version 2 path, use community.hashi_vault.vault_kv2_get" + ), + (hvac.exceptions.InvalidPath, "", r"^Invalid or missing path \['[^']+'\]"), + ] + ) + @pytest.mark.parametrize('path', ['path/1', 'second/path']) + def test_vault_kv1_get_exceptions(self, vault_kv1_get_lookup, minimal_vars, vault_client, path, exc): + client = vault_client + + client.secrets.kv.v1.read_secret.side_effect = exc[0](exc[1]) + + with pytest.raises(AnsibleError) as e: + vault_kv1_get_lookup.run(terms=[path], variables=minimal_vars) + + match = re.search(exc[2], str(e.value)) + + assert match is not None, "result: %r\ndid not match: %s" % (e.value, exc[2]) + + try: + assert path == match.group(1), "expected: %s\ngot: %s" % (match.group(1), path) + except IndexError: + pass diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_kv2_get.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_kv2_get.py new file mode 100644 index 000000000..c273feaa5 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_kv2_get.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import pytest + +from ansible.plugins.loader import lookup_loader +from ansible.errors import AnsibleError + +from ...compat import mock + +from .....plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + +from .....plugins.lookup import vault_kv2_get + + +hvac = pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 'patch_authenticator', + 'patch_get_vault_client', +) + + +@pytest.fixture +def vault_kv2_get_lookup(): + return lookup_loader.get('community.hashi_vault.vault_kv2_get') + + +@pytest.fixture +def kv2_get_response(fixture_loader): + return fixture_loader('kv2_get_response.json') + + +class TestVaultKv2GetLookup(object): + + def test_vault_kv2_get_is_lookup_base(self, vault_kv2_get_lookup): + assert issubclass(type(vault_kv2_get_lookup), HashiVaultLookupBase) + + def test_vault_kv2_get_no_hvac(self, vault_kv2_get_lookup, minimal_vars): + with mock.patch.object(vault_kv2_get, 'HVAC_IMPORT_ERROR', new=ImportError()): + with pytest.raises(AnsibleError, match=r"This plugin requires the 'hvac' Python library"): + vault_kv2_get_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_kv2_get_authentication_error(self, vault_kv2_get_lookup, minimal_vars, authenticator, exc): + authenticator.authenticate.side_effect = exc + + with pytest.raises(AnsibleError, match=r'throwaway msg'): + vault_kv2_get_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_kv2_get_auth_validation_error(self, vault_kv2_get_lookup, minimal_vars, authenticator, exc): + authenticator.validate.side_effect = exc + + with pytest.raises(AnsibleError, match=r'throwaway msg'): + vault_kv2_get_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('paths', [['fake1'], ['fake2', 
'fake3']]) + @pytest.mark.parametrize('engine_mount_point', ['secret', 'other']) + @pytest.mark.parametrize('version', [None, 2, 10]) + def test_vault_kv2_get_return_data(self, vault_kv2_get_lookup, minimal_vars, kv2_get_response, vault_client, paths, engine_mount_point, version): + client = vault_client + rv = kv2_get_response.copy() + rv['data']['metadata']['version'] = version + + expected = {} + expected['raw'] = rv.copy() + expected['metadata'] = expected['raw']['data']['metadata'] + expected['data'] = expected['raw']['data'] + expected['secret'] = expected['data']['data'] + + expected_calls = [mock.call(path=p, version=version, mount_point=engine_mount_point) for p in paths] + + def _fake_kv2_get(path, version, mount_point): + r = rv.copy() + r['data']['data'] = r['data']['data'].copy() + r['data']['data'].update({'_path': path}) + r['data']['data'].update({'_mount': mount_point}) + return r + + client.secrets.kv.v2.read_secret_version = mock.Mock(wraps=_fake_kv2_get) + + response = vault_kv2_get_lookup.run(terms=paths, variables=minimal_vars, version=version, engine_mount_point=engine_mount_point) + + client.secrets.kv.v2.read_secret_version.assert_has_calls(expected_calls) + + assert len(response) == len(paths), "%i paths processed but got %i responses" % (len(paths), len(response)) + + for p in paths: + r = response.pop(0) + ins_p = r['secret'].pop('_path') + ins_m = r['secret'].pop('_mount') + assert p == ins_p, "expected '_path=%s' field was not found in response, got %r" % (p, ins_p) + assert engine_mount_point == ins_m, "expected '_mount=%s' field was not found in response, got %r" % (engine_mount_point, ins_m) + assert r['raw'] == expected['raw'], ( + "remaining response did not match expected\nresponse: %r\nexpected: %r" % (r, expected['raw']) + ) + assert r['metadata'] == expected['metadata'] + assert r['data'] == expected['data'] + assert r['secret'] == expected['secret'] + + @pytest.mark.parametrize( + 'exc', + [ + (hvac.exceptions.Forbidden, "", r"^Forbidden: Permission Denied to path \['([^']+)'\]"), + ( + hvac.exceptions.InvalidPath, + "", + r"^Invalid or missing path \['([^']+)'\] with secret version '(\d+|latest)'. 
Check the path or secret version" + ), + ] + ) + @pytest.mark.parametrize('path', ['path/1', 'second/path']) + @pytest.mark.parametrize('version', [None, 2, 10]) + def test_vault_kv2_get_exceptions(self, vault_kv2_get_lookup, minimal_vars, vault_client, path, version, exc): + client = vault_client + + client.secrets.kv.v2.read_secret_version.side_effect = exc[0](exc[1]) + + with pytest.raises(AnsibleError) as e: + vault_kv2_get_lookup.run(terms=[path], variables=minimal_vars, version=version) + + match = re.search(exc[2], str(e.value)) + + assert path == match.group(1), "expected: %s\ngot: %s" % (match.group(1), path) + + try: + assert (version is None) == (match.group(2) == 'latest') + assert (version is not None) == (match.group(2) == str(version)) + except IndexError: + pass diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_list.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_list.py new file mode 100644 index 000000000..dddb4a381 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_list.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Tom Kivlin (@tomkivlin) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible.plugins.loader import lookup_loader +from ansible.errors import AnsibleError + +from ...compat import mock + +from .....plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + +from .....plugins.lookup import vault_list + + +hvac = pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 'patch_authenticator', + 'patch_get_vault_client', +) + + +@pytest.fixture +def vault_list_lookup(): + return lookup_loader.get('community.hashi_vault.vault_list') + + +LIST_FIXTURES = [ + 'kv2_list_response.json', + 'policy_list_response.json', + 'userpass_list_response.json', +] + + +@pytest.fixture(params=LIST_FIXTURES) +def list_response(request, fixture_loader): + return fixture_loader(request.param) + + +class TestVaultListLookup(object): + + def test_vault_list_is_lookup_base(self, vault_list_lookup): + assert issubclass(type(vault_list_lookup), HashiVaultLookupBase) + + def test_vault_list_no_hvac(self, vault_list_lookup, minimal_vars): + with mock.patch.object(vault_list, 'HVAC_IMPORT_ERROR', new=ImportError()): + with pytest.raises(AnsibleError, match=r"This plugin requires the 'hvac' Python library"): + vault_list_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_list_authentication_error(self, vault_list_lookup, minimal_vars, authenticator, exc): + authenticator.authenticate.side_effect = exc + + with pytest.raises(AnsibleError, match=r'throwaway msg'): + vault_list_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_list_auth_validation_error(self, vault_list_lookup, minimal_vars, authenticator, exc): + authenticator.validate.side_effect = exc + + with pytest.raises(AnsibleError, match=r'throwaway msg'): + vault_list_lookup.run(terms='fake', 
variables=minimal_vars) + + @pytest.mark.parametrize('paths', [['fake1'], ['fake2', 'fake3']]) + def test_vault_list_return_data(self, vault_list_lookup, minimal_vars, list_response, vault_client, paths): + client = vault_client + + expected_calls = [mock.call(p) for p in paths] + + def _fake_list_operation(path): + r = list_response.copy() + r.update({'_path': path}) + return r + + client.list = mock.Mock(wraps=_fake_list_operation) + + response = vault_list_lookup.run(terms=paths, variables=minimal_vars) + + client.list.assert_has_calls(expected_calls) + + assert len(response) == len(paths), "%i paths processed but got %i responses" % (len(paths), len(response)) + + for p in paths: + r = response.pop(0) + ins_p = r.pop('_path') + assert p == ins_p, "expected '_path=%s' field was not found in response, got %r" % (p, ins_p) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_login.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_login.py new file mode 100644 index 000000000..aa15139ce --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_login.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible.plugins.loader import lookup_loader +from ansible.errors import AnsibleError + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase + +from .....plugins.lookup import vault_login + + +pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 'patch_authenticator', + 'patch_get_vault_client', +) + + +@pytest.fixture +def vault_login_lookup(): + return lookup_loader.get('community.hashi_vault.vault_login') + + +class TestVaultLoginLookup(object): + + def test_vault_login_is_lookup_base(self, vault_login_lookup): + assert issubclass(type(vault_login_lookup), HashiVaultLookupBase) + + def test_vault_login_no_hvac(self, vault_login_lookup, minimal_vars): + with mock.patch.object(vault_login, 'HVAC_IMPORT_ERROR', new=ImportError()): + with pytest.raises(AnsibleError, match=r"This plugin requires the 'hvac' Python library"): + vault_login_lookup.run(terms='fake', variables=minimal_vars) + + def test_vault_login_auth_none(self, vault_login_lookup): + with pytest.raises(AnsibleError, match=r"The 'none' auth method is not valid for this lookup"): + vault_login_lookup.run(terms=[], variables={'ansible_hashi_vault_auth_method': 'none'}) + + def test_vault_login_extra_terms(self, vault_login_lookup, authenticator, minimal_vars): + with mock.patch('ansible_collections.community.hashi_vault.plugins.lookup.vault_login.display.warning') as warning: + with mock.patch.object(vault_login_lookup, 'authenticator', new=authenticator): + vault_login_lookup.run(terms=['', ''], variables=minimal_vars) + warning.assert_called_once_with("Supplied term strings will be ignored. 
This lookup does not use term strings.") diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_read.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_read.py new file mode 100644 index 000000000..b410cf9c4 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_read.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible.plugins.loader import lookup_loader +from ansible.errors import AnsibleError + +from ...compat import mock + +from .....plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + +from .....plugins.lookup import vault_read + + +hvac = pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 'patch_authenticator', + 'patch_get_vault_client', +) + + +@pytest.fixture +def vault_read_lookup(): + return lookup_loader.get('community.hashi_vault.vault_read') + + +@pytest.fixture +def kv1_get_response(fixture_loader): + return fixture_loader('kv1_get_response.json') + + +class TestVaultReadLookup(object): + + def test_vault_read_is_lookup_base(self, vault_read_lookup): + assert issubclass(type(vault_read_lookup), HashiVaultLookupBase) + + def test_vault_read_no_hvac(self, vault_read_lookup, minimal_vars): + with mock.patch.object(vault_read, 'HVAC_IMPORT_ERROR', new=ImportError()): + with pytest.raises(AnsibleError, match=r"This plugin requires the 'hvac' Python library"): + vault_read_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_read_authentication_error(self, vault_read_lookup, minimal_vars, authenticator, exc): + authenticator.authenticate.side_effect = exc + + with pytest.raises(AnsibleError, match=r'throwaway msg'): + vault_read_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_read_auth_validation_error(self, vault_read_lookup, minimal_vars, authenticator, exc): + authenticator.validate.side_effect = exc + + with pytest.raises(AnsibleError, match=r'throwaway msg'): + vault_read_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('paths', [['fake1'], ['fake2', 'fake3']]) + def test_vault_read_return_data(self, vault_read_lookup, minimal_vars, kv1_get_response, vault_client, paths): + client = vault_client + + expected_calls = [mock.call(p) for p in paths] + + def _fake_kv1_get(path): + r = kv1_get_response.copy() + r.update({'_path': path}) + return r + + client.read = mock.Mock(wraps=_fake_kv1_get) + + response = vault_read_lookup.run(terms=paths, variables=minimal_vars) + + client.read.assert_has_calls(expected_calls) + + assert len(response) == len(paths), "%i paths processed but got %i responses" % (len(paths), len(response)) + + for p in paths: + r = response.pop(0) + ins_p = r.pop('_path') + assert p == ins_p, "expected '_path=%s' field was not found in response, got %r" % (p, ins_p) diff --git 
a/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_token_create.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_token_create.py new file mode 100644 index 000000000..05a74f8b4 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_token_create.py @@ -0,0 +1,241 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import sys +import pytest + +from ansible.plugins.loader import lookup_loader +from ansible.errors import AnsibleError + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase + +from .....plugins.lookup import vault_token_create +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + + +pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 'patch_authenticator', + 'patch_get_vault_client', +) + + +@pytest.fixture +def vault_token_create_lookup(): + return lookup_loader.get('community.hashi_vault.vault_token_create') + + +@pytest.fixture +def pass_thru_options(): + return { + 'no_parent': True, + 'no_default_policy': True, + 'policies': ['a', 'b'], + 'id': 'tokenid', + 'role_name': 'role', + 'meta': {'a': 'valA', 'b': 'valB'}, + 'renewable': True, + 'ttl': '1h', + 'type': 'batch', + 'explicit_max_ttl': '2h', + 'display_name': 'kiminonamae', + 'num_uses': 9, + 'period': '8h', + 'entity_alias': 'alias', + 'wrap_ttl': '60s', + } + + +@pytest.fixture +def orphan_option_translation(): + return { + 'id': 'token_id', + 'role_name': 'role', + 'type': 'token_type', + } + + +@pytest.fixture +def token_create_response(fixture_loader): + return fixture_loader('token_create_response.json') + + +class TestVaultTokenCreateLookup(object): + + def test_vault_token_create_is_lookup_base(self, vault_token_create_lookup): + assert issubclass(type(vault_token_create_lookup), HashiVaultLookupBase) + + def test_vault_token_create_no_hvac(self, vault_token_create_lookup, minimal_vars): + with mock.patch.object(vault_token_create, 'HVAC_IMPORT_ERROR', new=ImportError()): + with pytest.raises(AnsibleError, match=r"This plugin requires the 'hvac' Python library"): + vault_token_create_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_token_create_authentication_error(self, vault_token_create_lookup, minimal_vars, authenticator, exc): + authenticator.authenticate.side_effect = exc + + with pytest.raises(AnsibleError, match=r'throwaway msg'): + vault_token_create_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_token_create_auth_validation_error(self, vault_token_create_lookup, minimal_vars, authenticator, exc): + authenticator.validate.side_effect = exc + + with pytest.raises(AnsibleError, match=r'throwaway msg'): + vault_token_create_lookup.run(terms='fake', variables=minimal_vars) + + def test_vault_token_create_extra_terms(self, vault_token_create_lookup, authenticator, 
minimal_vars): + with mock.patch('ansible_collections.community.hashi_vault.plugins.lookup.vault_token_create.display.warning') as warning: + with mock.patch.object(vault_token_create_lookup, 'authenticator', new=authenticator): + with mock.patch.object(vault_token_create_lookup.helper, 'get_vault_client'): + vault_token_create_lookup.run(terms=['', ''], variables=minimal_vars) + warning.assert_called_once_with("Supplied term strings will be ignored. This lookup does not use term strings.") + + def test_vault_token_create_passthru_options_expected(self, vault_token_create_lookup, pass_thru_options): + # designed to catch the case where new passthru options differ between tests and lookup + + lookup_set = set(vault_token_create_lookup.PASS_THRU_OPTION_NAMES) + test_set = set(pass_thru_options.keys()) + + assert sorted(vault_token_create_lookup.PASS_THRU_OPTION_NAMES) == sorted(pass_thru_options.keys()), ( + "Passthru options in lookup do not match options in test: %r" % ( + list(lookup_set ^ test_set) + ) + ) + + def test_vault_token_create_orphan_options_expected(self, vault_token_create_lookup, orphan_option_translation, pass_thru_options): + # designed to catch the case where new orphan translations differ between tests and lookup + # and that all listed translations are present in passthru options + + lookup_set = set(vault_token_create_lookup.ORPHAN_OPTION_TRANSLATION.items()) + test_set = set(orphan_option_translation.items()) + + lookup_key_set = set(vault_token_create_lookup.ORPHAN_OPTION_TRANSLATION.keys()) + pass_thru_key_set = set(pass_thru_options.keys()) + + assert lookup_set == test_set, ( + "Orphan options in lookup do not match orphan options in test:\nlookup: %r\ntest: %r" % ( + dict(lookup_set - test_set), + dict(test_set - lookup_set), + ) + ) + assert vault_token_create_lookup.ORPHAN_OPTION_TRANSLATION.keys() <= pass_thru_options.keys(), ( + "Orphan option translation keys must exist in passthru options: %r" % ( + list(lookup_key_set - pass_thru_key_set), + ) + ) + + def test_vault_token_create_passthru_options(self, vault_token_create_lookup, authenticator, minimal_vars, pass_thru_options, token_create_response): + + client = mock.MagicMock() + client.auth.token.create.return_value = token_create_response + + with mock.patch.object(vault_token_create_lookup, 'authenticator', new=authenticator): + with mock.patch.object(vault_token_create_lookup.helper, 'get_vault_client', return_value=client): + result = vault_token_create_lookup.run(terms=[], variables=minimal_vars, **pass_thru_options) + + client.create_token.assert_not_called() + client.auth.token.create.assert_called_once() + + assert result[0] == token_create_response, ( + "lookup result did not match expected result:\nlookup: %r\nexpected: %r" % (result, token_create_response) + ) + + if sys.version_info < (3, 8): + # TODO: remove when python < 3.8 is dropped + assert pass_thru_options.items() <= client.auth.token.create.call_args[1].items() + else: + assert pass_thru_options.items() <= client.auth.token.create.call_args.kwargs.items() + + def test_vault_token_create_orphan_options( + self, vault_token_create_lookup, authenticator, minimal_vars, pass_thru_options, orphan_option_translation, token_create_response + ): + + client = mock.MagicMock() + client.auth.token.create_orphan.return_value = token_create_response + + with mock.patch.object(vault_token_create_lookup, 'authenticator', new=authenticator): + with mock.patch.object(vault_token_create_lookup.helper, 'get_vault_client', return_value=client): + result 
= vault_token_create_lookup.run(terms=[], variables=minimal_vars, orphan=True, **pass_thru_options) + + client.auth.token.create.assert_not_called() + client.auth.token.create_orphan.assert_called_once() + client.create_token.assert_not_called() + + assert result[0] == token_create_response, ( + "lookup result did not match expected result:\nlookup: %r\nexpected: %r" % (result, token_create_response) + ) + + if sys.version_info < (3, 8): + # TODO: remove when python < 3.8 is dropped + call_kwargs = client.auth.token.create_orphan.call_args[1] + else: + call_kwargs = client.auth.token.create_orphan.call_args.kwargs + + for name, orphan in orphan_option_translation.items(): + assert name not in call_kwargs, ( + "'%s' was found in call to orphan method, should be '%s'" % (name, orphan) + ) + assert orphan in call_kwargs, ( + "'%s' (from '%s') was not found in call to orphan method" % (orphan, name) + ) + assert call_kwargs[orphan] == pass_thru_options.get(name), ( + "Expected orphan param '%s' not found or value did not match:\nvalue: %r\nexpected: %r" % ( + orphan, + call_kwargs.get(orphan), + pass_thru_options.get(name), + ) + ) + + def test_vault_token_create_orphan_fallback(self, vault_token_create_lookup, authenticator, minimal_vars, pass_thru_options, token_create_response): + client = mock.MagicMock() + client.create_token.return_value = token_create_response + client.auth.token.create_orphan.side_effect = AttributeError + + with mock.patch.object(vault_token_create_lookup, 'authenticator', new=authenticator): + with mock.patch.object(vault_token_create_lookup.helper, 'get_vault_client', return_value=client): + result = vault_token_create_lookup.run(terms=[], variables=minimal_vars, orphan=True, **pass_thru_options) + + client.auth.token.create_orphan.assert_called_once() + client.create_token.assert_called_once() + + assert result[0] == token_create_response, ( + "lookup result did not match expected result:\nlookup: %r\nexpected: %r" % (result, token_create_response) + ) + + def test_vault_token_create_exception_handling_standard(self, vault_token_create_lookup, authenticator, minimal_vars, pass_thru_options): + client = mock.MagicMock() + client.auth.token.create.side_effect = Exception('side_effect') + + with mock.patch.object(vault_token_create_lookup, 'authenticator', new=authenticator): + with mock.patch.object(vault_token_create_lookup.helper, 'get_vault_client', return_value=client): + with pytest.raises(AnsibleError, match=r'^side_effect$'): + vault_token_create_lookup.run(terms=[], variables=minimal_vars, **pass_thru_options) + + def test_vault_token_create_exception_handling_orphan(self, vault_token_create_lookup, authenticator, minimal_vars, pass_thru_options): + client = mock.MagicMock() + client.auth.token.create_orphan.side_effect = Exception('side_effect') + + with mock.patch.object(vault_token_create_lookup, 'authenticator', new=authenticator): + with mock.patch.object(vault_token_create_lookup.helper, 'get_vault_client', return_value=client): + with pytest.raises(AnsibleError, match=r'^side_effect$'): + vault_token_create_lookup.run(terms=[], variables=minimal_vars, orphan=True, **pass_thru_options) + + def test_vault_token_create_exception_handling_orphan_fallback(self, vault_token_create_lookup, authenticator, minimal_vars, pass_thru_options): + client = mock.MagicMock() + client.create_token.side_effect = Exception('side_effect') + client.auth.token.create_orphan.side_effect = AttributeError + + with mock.patch.object(vault_token_create_lookup, 'authenticator', 
new=authenticator): + with mock.patch.object(vault_token_create_lookup.helper, 'get_vault_client', return_value=client): + with pytest.raises(AnsibleError, match=r'^side_effect$'): + vault_token_create_lookup.run(terms=[], variables=minimal_vars, orphan=True, **pass_thru_options) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_write.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_write.py new file mode 100644 index 000000000..c3c325228 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/lookup/test_vault_write.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible.plugins.loader import lookup_loader +from ansible.errors import AnsibleError + +from ...compat import mock + +from .....plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + +from .....plugins.lookup import vault_write + + +hvac = pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 'patch_authenticator', + 'patch_get_vault_client', +) + + +@pytest.fixture +def vault_write_lookup(): + return lookup_loader.get('community.hashi_vault.vault_write') + + +@pytest.fixture +def approle_secret_id_write_response(fixture_loader): + return fixture_loader('approle_secret_id_write_response.json') + + +class TestVaultWriteLookup(object): + + def test_vault_write_is_lookup_base(self, vault_write_lookup): + assert issubclass(type(vault_write_lookup), HashiVaultLookupBase) + + def test_vault_write_no_hvac(self, vault_write_lookup, minimal_vars): + with mock.patch.object(vault_write, 'HVAC_IMPORT_ERROR', new=ImportError()): + with pytest.raises(AnsibleError, match=r"This plugin requires the 'hvac' Python library"): + vault_write_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_write_authentication_error(self, vault_write_lookup, minimal_vars, authenticator, exc): + authenticator.authenticate.side_effect = exc + + with pytest.raises(AnsibleError, match=r'throwaway msg'): + vault_write_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_write_auth_validation_error(self, vault_write_lookup, minimal_vars, authenticator, exc): + authenticator.validate.side_effect = exc + + with pytest.raises(AnsibleError, match=r'throwaway msg'): + vault_write_lookup.run(terms='fake', variables=minimal_vars) + + @pytest.mark.parametrize('paths', [['fake1'], ['fake2', 'fake3']]) + @pytest.mark.parametrize('data', [{}, {'a': 1, 'b': 'two'}]) + @pytest.mark.parametrize('wrap_ttl', [None, '5m']) + def test_vault_write_return_data(self, vault_write_lookup, minimal_vars, approle_secret_id_write_response, vault_client, paths, data, wrap_ttl): + client = vault_client + + expected_calls = [mock.call(path=p, wrap_ttl=wrap_ttl, **data) for p in paths] + + def _fake_write(path, wrap_ttl, **data): + r = approle_secret_id_write_response.copy() + r.update({'path': path}) + return r 
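# Illustrative note (not part of the committed file): mock.Mock(wraps=_fake_write)
# records every call for the assert_has_calls() check below while still delegating to
# _fake_write, so each returned response carries the 'path' it was called with and
# can be matched back to its originating term later in the test.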
+ + client.write = mock.Mock(wraps=_fake_write) + + response = vault_write_lookup.run(terms=paths, variables=minimal_vars, wrap_ttl=wrap_ttl, data=data) + + client.write.assert_has_calls(expected_calls) + + assert len(response) == len(paths), "%i paths processed but got %i responses" % (len(paths), len(response)) + + for p in paths: + r = response.pop(0) + m = r.pop('path') + assert p == m, "expected 'path=%s' field was not found in response, got %r" % (p, m) + assert r == approle_secret_id_write_response, ( + "remaining response did not match expected\nresponse: %r\nexpected: %r" % (r, approle_secret_id_write_response) + ) + + def test_vault_write_empty_response(self, vault_write_lookup, minimal_vars, vault_client, requests_unparseable_response): + client = vault_client + + requests_unparseable_response.status_code = 204 + + client.write.return_value = requests_unparseable_response + + response = vault_write_lookup.run(terms=['fake'], variables=minimal_vars) + + assert response[0] == {} + + def test_vault_write_unparseable_response(self, vault_write_lookup, minimal_vars, vault_client, requests_unparseable_response): + client = vault_client + + requests_unparseable_response.status_code = 200 + requests_unparseable_response.content = '﷽' + + client.write.return_value = requests_unparseable_response + + with mock.patch('ansible_collections.community.hashi_vault.plugins.lookup.vault_write.display.warning') as warning: + response = vault_write_lookup.run(terms=['fake'], variables=minimal_vars) + warning.assert_called_once_with('Vault returned status code 200 and an unparsable body.') + + assert response[0] == '﷽' + + @pytest.mark.parametrize( + 'exc', + [ + (hvac.exceptions.Forbidden, r'^Forbidden: Permission Denied to path'), + (hvac.exceptions.InvalidPath, r"^The path '[^']+' doesn't seem to exist"), + (hvac.exceptions.InternalServerError, r'^Internal Server Error:'), + ] + ) + def test_vault_write_exceptions(self, vault_write_lookup, minimal_vars, vault_client, exc): + client = vault_client + + client.write.side_effect = exc[0] + + with pytest.raises(AnsibleError, match=exc[1]): + vault_write_lookup.run(terms=['fake'], variables=minimal_vars) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/conftest.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/conftest.py new file mode 100644 index 000000000..d020114f3 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/conftest.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest +import contextlib + +try: + import hvac +except ImportError: + # python 2.6, which isn't supported anyway + pass + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultAuthMethodBase, + HashiVaultOptionAdapter, +) + + +class HashiVaultAuthMethodFake(HashiVaultAuthMethodBase): + NAME = 'fake' + OPTIONS = [] + + def __init__(self, option_adapter, warning_callback, deprecate_callback): + super(HashiVaultAuthMethodFake, self).__init__(option_adapter, warning_callback, deprecate_callback) 
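# Illustrative note (not part of the committed file): validate and authenticate are
# class-level MagicMocks so auth-method tests can inspect calls or force failures
# without a real Vault, e.g. (hypothetical usage, names assumed for illustration):
#
#     fake_auth_class.authenticate.side_effect = NotImplementedError('not supported')
#     with pytest.raises(NotImplementedError):
#         fake_auth_class.authenticate(client)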
+ + validate = mock.MagicMock() + authenticate = mock.MagicMock() + + +@pytest.fixture +def option_dict(): + return {'auth_method': 'fake'} + + +@pytest.fixture +def adapter(option_dict): + return HashiVaultOptionAdapter.from_dict(option_dict) + + +@pytest.fixture +def fake_auth_class(adapter, warner, deprecator): + return HashiVaultAuthMethodFake(adapter, warner, deprecator) + + +@pytest.fixture +def client(): + return hvac.Client() + + +@pytest.fixture +def warner(): + return mock.MagicMock() + + +@pytest.fixture +def deprecator(): + return mock.MagicMock() + + +@pytest.fixture +def mock_import_error(): + @contextlib.contextmanager + def _mock_import_error(*names): + import builtins + + real_import = builtins.__import__ + + def _fake_importer(name, *args, **kwargs): + if name in names: + raise ImportError + + return real_import(name, *args, **kwargs) + + with mock.patch.object(builtins, '__import__', side_effect=_fake_importer): + yield + + return _mock_import_error diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_approle.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_approle.py new file mode 100644 index 000000000..4507b1bd8 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_approle.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_approle import ( + HashiVaultAuthMethodApprole, +) + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultAuthMethodBase, + HashiVaultValueError, +) + + +@pytest.fixture +def option_dict(): + return { + 'auth_method': 'approle', + 'secret_id': None, + 'role_id': None, + 'mount_point': None, + } + + +@pytest.fixture +def secret_id(): + return 'opaque' + + +@pytest.fixture +def role_id(): + return 'fake-role' + + +@pytest.fixture +def auth_approle(adapter, warner, deprecator): + return HashiVaultAuthMethodApprole(adapter, warner, deprecator) + + +@pytest.fixture +def approle_login_response(fixture_loader): + return fixture_loader('approle_login_response.json') + + +class TestAuthApprole(object): + + def test_auth_approle_is_auth_method_base(self, auth_approle): + assert isinstance(auth_approle, HashiVaultAuthMethodApprole) + assert issubclass(HashiVaultAuthMethodApprole, HashiVaultAuthMethodBase) + + def test_auth_approle_validate_direct(self, auth_approle, adapter, role_id): + adapter.set_option('role_id', role_id) + + auth_approle.validate() + + @pytest.mark.parametrize('opt_patch', [ + {}, + {'secret_id': 'secret_id-only'}, + ]) + def test_auth_approle_validate_xfailures(self, auth_approle, adapter, opt_patch): + adapter.set_options(**opt_patch) + + with pytest.raises(HashiVaultValueError, match=r'Authentication method approle requires options .*? 
to be set, but these are missing:'): + auth_approle.validate() + + @pytest.mark.parametrize('use_token', [True, False], ids=lambda x: 'use_token=%s' % x) + @pytest.mark.parametrize('mount_point', [None, 'other'], ids=lambda x: 'mount_point=%s' % x) + def test_auth_approle_authenticate(self, auth_approle, client, adapter, secret_id, role_id, mount_point, use_token, approle_login_response): + adapter.set_option('secret_id', secret_id) + adapter.set_option('role_id', role_id) + adapter.set_option('mount_point', mount_point) + + expected_login_params = { + 'secret_id': secret_id, + 'role_id': role_id, + 'use_token': use_token, + } + if mount_point: + expected_login_params['mount_point'] = mount_point + + def _set_client_token(*args, **kwargs): + if kwargs['use_token']: + client.token = approle_login_response['auth']['client_token'] + return approle_login_response + + with mock.patch.object(client.auth.approle, 'login', side_effect=_set_client_token) as approle_login: + response = auth_approle.authenticate(client, use_token=use_token) + approle_login.assert_called_once_with(**expected_login_params) + + assert response['auth']['client_token'] == approle_login_response['auth']['client_token'] + assert (client.token == approle_login_response['auth']['client_token']) is use_token diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_aws_iam.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_aws_iam.py new file mode 100644 index 000000000..678146b92 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_aws_iam.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_aws_iam import ( + HashiVaultAuthMethodAwsIam, +) + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultAuthMethodBase, + HashiVaultValueError, +) + + +@pytest.fixture +def option_dict(): + return { + 'auth_method': 'aws_iam', + 'aws_access_key': None, + 'aws_secret_key': None, + 'aws_profile': None, + 'aws_security_token': None, + 'region': None, + 'aws_iam_server_id': None, + 'role_id': None, + 'mount_point': None, + } + + +@pytest.fixture +def aws_access_key(): + return 'access-key' + + +@pytest.fixture +def aws_secret_key(): + return 'secret-key' + + +@pytest.fixture +def aws_session_token(): + return 'session-token' + + +@pytest.fixture +def auth_aws_iam(adapter, warner, deprecator): + return HashiVaultAuthMethodAwsIam(adapter, warner, deprecator) + + +@pytest.fixture +def aws_iam_login_response(fixture_loader): + return fixture_loader('aws_iam_login_response.json') + + +@pytest.fixture +def boto_mocks(aws_access_key, aws_secret_key, aws_session_token): + class botocore_profile_not_found(Exception): + pass + + credentials = mock.MagicMock(access_key=aws_access_key, secret_key=aws_secret_key, token=aws_session_token) + mock_session = mock.MagicMock(get_credentials=mock.MagicMock(return_value=credentials)) + + def _Session(profile_name): + if 
profile_name == 'missing_profile': + raise botocore_profile_not_found + + return mock_session + + boto3 = mock.MagicMock() + boto3.session.Session = mock.MagicMock(side_effect=_Session) + + botocore = mock.MagicMock() + botocore.exceptions.ProfileNotFound = botocore_profile_not_found + + return mock.MagicMock( + botocore=botocore, + boto3=boto3, + session=mock_session, + credentials=credentials + ) + + +class TestAuthAwsIam(object): + + def test_auth_aws_iam_is_auth_method_base(self, auth_aws_iam): + assert isinstance(auth_aws_iam, HashiVaultAuthMethodAwsIam) + assert issubclass(HashiVaultAuthMethodAwsIam, HashiVaultAuthMethodBase) + + @pytest.mark.parametrize('aws_security_token', [None, 'session-token'], ids=lambda x: 'aws_security_token=%s' % x) + @pytest.mark.parametrize('region', [None, 'ap-northeast-1'], ids=lambda x: 'region=%s' % x) + @pytest.mark.parametrize('aws_iam_server_id', [None, 'server-id'], ids=lambda x: 'aws_iam_server_id=%s' % x) + @pytest.mark.parametrize('role_id', [None, 'vault-role'], ids=lambda x: 'role_id=%s' % x) + @pytest.mark.parametrize('mount_point', [None, 'other'], ids=lambda x: 'mount_point=%s' % x) + def test_auth_aws_iam_validate( + self, auth_aws_iam, adapter, aws_access_key, aws_secret_key, aws_security_token, + region, aws_iam_server_id, role_id, mount_point + ): + adapter.set_options( + aws_access_key=aws_access_key, aws_secret_key=aws_secret_key, aws_security_token=aws_security_token, + region=region, aws_iam_server_id=aws_iam_server_id, role_id=role_id, mount_point=mount_point + ) + + auth_aws_iam.validate() + + login_params = auth_aws_iam._auth_aws_iam_login_params + + assert login_params['access_key'] == aws_access_key + assert login_params['secret_key'] == aws_secret_key + + assert (aws_security_token is None and 'session_token' not in login_params) or login_params['session_token'] == aws_security_token + assert (mount_point is None and 'mount_point' not in login_params) or login_params['mount_point'] == mount_point + assert (role_id is None and 'role' not in login_params) or login_params['role'] == role_id + assert (region is None and 'region' not in login_params) or login_params['region'] == region + assert (aws_iam_server_id is None and 'header_value' not in login_params) or login_params['header_value'] == aws_iam_server_id + + @pytest.mark.parametrize('use_token', [True, False], ids=lambda x: 'use_token=%s' % x) + @pytest.mark.parametrize('mount_point', [None, 'other'], ids=lambda x: 'mount_point=%s' % x) + @pytest.mark.parametrize('aws_security_token', [None, 'session-token'], ids=lambda x: 'aws_security_token=%s' % x) + @pytest.mark.parametrize('region', [None, 'ap-northeast-1'], ids=lambda x: 'region=%s' % x) + @pytest.mark.parametrize('aws_iam_server_id', [None, 'server-id'], ids=lambda x: 'aws_iam_server_id=%s' % x) + @pytest.mark.parametrize('role_id', [None, 'vault-role'], ids=lambda x: 'role_id=%s' % x) + def test_auth_aws_iam_authenticate( + self, auth_aws_iam, client, adapter, aws_access_key, aws_secret_key, aws_security_token, + region, aws_iam_server_id, role_id, mount_point, use_token, aws_iam_login_response + ): + adapter.set_options( + aws_access_key=aws_access_key, aws_secret_key=aws_secret_key, aws_security_token=aws_security_token, + region=region, aws_iam_server_id=aws_iam_server_id, role_id=role_id, mount_point=mount_point + ) + + auth_aws_iam.validate() + + expected_login_params = auth_aws_iam._auth_aws_iam_login_params.copy() + + with mock.patch.object(client.auth.aws, 'iam_login', return_value=aws_iam_login_response) 
as aws_iam_login: + response = auth_aws_iam.authenticate(client, use_token=use_token) + aws_iam_login.assert_called_once_with(use_token=use_token, **expected_login_params) + + assert response['auth']['client_token'] == aws_iam_login_response['auth']['client_token'] + + def test_auth_aws_iam_validate_no_creds_no_boto(self, auth_aws_iam, mock_import_error): + with mock_import_error('botocore', 'boto3'): + with pytest.raises(HashiVaultValueError, match=r'boto3 is required for loading a profile or IAM role credentials'): + auth_aws_iam.validate() + + @pytest.mark.parametrize('profile', ['my_aws_profile', None]) + def test_auth_aws_iam_validate_inferred_creds(self, auth_aws_iam, boto_mocks, adapter, profile, aws_access_key, aws_secret_key, aws_session_token): + adapter.set_option('aws_profile', profile) + + botocore = boto_mocks.botocore + boto3 = boto_mocks.boto3 + + with mock.patch.dict('sys.modules', {'botocore': botocore, 'boto3': boto3}): + auth_aws_iam.validate() + + params = auth_aws_iam._auth_aws_iam_login_params + + assert boto3.session.Session.called_once_with(profile_name=profile) + + assert params['access_key'] == aws_access_key + assert params['secret_key'] == aws_secret_key + assert params['session_token'] == aws_session_token + + @pytest.mark.parametrize('profile', ['missing_profile']) + def test_auth_aws_iam_validate_missing_profile(self, auth_aws_iam, boto_mocks, adapter, profile): + adapter.set_option('aws_profile', profile) + + botocore = boto_mocks.botocore + boto3 = boto_mocks.boto3 + + with mock.patch.dict('sys.modules', {'botocore': botocore, 'boto3': boto3}): + with pytest.raises(HashiVaultValueError, match="The AWS profile '%s' was not found" % profile): + auth_aws_iam.validate() + + @pytest.mark.parametrize('profile', ['my_aws_profile', None]) + def test_auth_aws_iam_validate_no_inferred_creds_found(self, auth_aws_iam, boto_mocks, adapter, profile): + adapter.set_option('aws_profile', profile) + + botocore = boto_mocks.botocore + boto3 = boto_mocks.boto3 + + with mock.patch.dict('sys.modules', {'botocore': botocore, 'boto3': boto3}): + with mock.patch.object(boto_mocks.session, 'get_credentials', return_value=None): + with pytest.raises(HashiVaultValueError, match=r'No AWS credentials supplied or available'): + auth_aws_iam.validate() diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_azure.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_azure.py new file mode 100644 index 000000000..747a432df --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_azure.py @@ -0,0 +1,224 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Junrui Chen (@jchenship) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +import pytest + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_azure import ( + HashiVaultAuthMethodAzure, +) + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultAuthMethodBase, + HashiVaultValueError, +) + + +@pytest.fixture +def option_dict(): + return { + 'auth_method': 'azure', + 'role_id': 'vault-role', + 'mount_point': None, 
+ 'jwt': None, + 'azure_tenant_id': None, + 'azure_client_id': None, + 'azure_client_secret': None, + 'azure_resource': 'https://management.azure.com/', + } + + +@pytest.fixture +def azure_client_id(): + return 'client-id' + + +@pytest.fixture +def azure_client_secret(): + return 'client-secret' + + +@pytest.fixture +def jwt(): + return 'jwt-token' + + +@pytest.fixture +def auth_azure(adapter, warner, deprecator): + return HashiVaultAuthMethodAzure(adapter, warner, deprecator) + + +@pytest.fixture +def azure_login_response(fixture_loader): + return fixture_loader('azure_login_response.json') + + +class TestAuthAzure(object): + def test_auth_azure_is_auth_method_base(self, auth_azure): + assert isinstance(auth_azure, HashiVaultAuthMethodAzure) + assert issubclass(HashiVaultAuthMethodAzure, HashiVaultAuthMethodBase) + + def test_auth_azure_validate_role_id(self, auth_azure, adapter): + adapter.set_options(role_id=None) + with pytest.raises(HashiVaultValueError, match=r'^role_id is required for azure authentication\.$'): + auth_azure.validate() + + @pytest.mark.parametrize('mount_point', [None, 'other'], ids=lambda x: 'mount_point=%s' % x) + @pytest.mark.parametrize('role_id', ['role1', 'role2'], ids=lambda x: 'role_id=%s' % x) + @pytest.mark.parametrize('jwt', ['jwt1', 'jwt2'], ids=lambda x: 'jwt=%s' % x) + def test_auth_azure_validate_use_jwt( + self, auth_azure, adapter, role_id, mount_point, jwt + ): + adapter.set_options( + role_id=role_id, + mount_point=mount_point, + jwt=jwt, + ) + + auth_azure.validate() + + params = auth_azure._auth_azure_login_params + + assert (mount_point is None and 'mount_point' not in params) or params['mount_point'] == mount_point + assert params['role'] == role_id + assert params['jwt'] == jwt + + @pytest.mark.parametrize('mount_point', [None, 'other'], ids=lambda x: 'mount_point=%s' % x) + @pytest.mark.parametrize('use_token', [True, False], ids=lambda x: 'use_token=%s' % x) + def test_auth_azure_authenticate_use_jwt( + self, + auth_azure, + client, + adapter, + mount_point, + jwt, + use_token, + azure_login_response, + ): + adapter.set_options( + mount_point=mount_point, + jwt=jwt, + ) + + auth_azure.validate() + + params = auth_azure._auth_azure_login_params.copy() + + with mock.patch.object( + client.auth.azure, 'login', return_value=azure_login_response + ) as azure_login: + response = auth_azure.authenticate(client, use_token=use_token) + azure_login.assert_called_once_with(use_token=use_token, **params) + + assert ( + response['auth']['client_token'] + == azure_login_response['auth']['client_token'] + ) + + def test_auth_azure_validate_use_identity_no_azure_identity_lib( + self, auth_azure, mock_import_error, adapter + ): + adapter.set_options() + with mock_import_error('azure.identity'): + with pytest.raises( + HashiVaultValueError, match=r'azure-identity is required' + ): + auth_azure.validate() + + @pytest.mark.parametrize('azure_tenant_id', ['tenant1', 'tenant2'], ids=lambda x: 'azure_tenant_id=%s' % x) + @pytest.mark.parametrize('azure_client_id', ['client1', 'client2'], ids=lambda x: 'azure_client_id=%s' % x) + @pytest.mark.parametrize('azure_client_secret', ['secret1', 'secret2'], ids=lambda x: 'azure_client_secret=%s' % x) + @pytest.mark.parametrize('jwt', ['jwt1', 'jwt2'], ids=lambda x: 'jwt=%s' % x) + def test_auth_azure_validate_use_service_principal( + self, + auth_azure, + adapter, + jwt, + azure_tenant_id, + azure_client_id, + azure_client_secret, + ): + adapter.set_options( + azure_tenant_id=azure_tenant_id, + 
azure_client_id=azure_client_id, + azure_client_secret=azure_client_secret, + ) + + with mock.patch( + 'azure.identity.ClientSecretCredential' + ) as mocked_credential_class: + credential = mocked_credential_class.return_value + credential.get_token.return_value.token = jwt + auth_azure.validate() + + assert mocked_credential_class.called_once_with( + azure_tenant_id, azure_client_id, azure_client_secret + ) + assert credential.get_token.called_once_with( + 'https://management.azure.com//.default' + ) + + params = auth_azure._auth_azure_login_params + assert params['jwt'] == jwt + + def test_auth_azure_validate_use_service_principal_no_tenant_id( + self, auth_azure, adapter, azure_client_id, azure_client_secret + ): + adapter.set_options( + azure_client_id=azure_client_id, + azure_client_secret=azure_client_secret, + ) + + with pytest.raises(HashiVaultValueError, match='azure_tenant_id is required'): + auth_azure.validate() + + @pytest.mark.parametrize('azure_client_id', ['client1', 'client2'], ids=lambda x: 'azure_client_id=%s' % x) + @pytest.mark.parametrize('jwt', ['jwt1', 'jwt2'], ids=lambda x: 'jwt=%s' % x) + def test_auth_azure_validate_use_user_managed_identity( + self, auth_azure, adapter, jwt, azure_client_id + ): + adapter.set_options( + azure_client_id=azure_client_id, + ) + + with mock.patch( + 'azure.identity.ManagedIdentityCredential' + ) as mocked_credential_class: + credential = mocked_credential_class.return_value + credential.get_token.return_value.token = jwt + auth_azure.validate() + + assert mocked_credential_class.called_once_with(azure_client_id) + assert credential.get_token.called_once_with( + 'https://management.azure.com//.default' + ) + + params = auth_azure._auth_azure_login_params + assert params['jwt'] == jwt + + @pytest.mark.parametrize('jwt', ['jwt1', 'jwt2'], ids=lambda x: 'jwt=%s' % x) + def test_auth_azure_validate_use_system_managed_identity( + self, auth_azure, adapter, jwt + ): + adapter.set_options() + + with mock.patch( + 'azure.identity.ManagedIdentityCredential' + ) as mocked_credential_class: + credential = mocked_credential_class.return_value + credential.get_token.return_value.token = jwt + auth_azure.validate() + + assert mocked_credential_class.called_once_with() + assert credential.get_token.called_once_with( + 'https://management.azure.com//.default' + ) + + params = auth_azure._auth_azure_login_params + assert params['jwt'] == jwt diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_cert.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_cert.py new file mode 100644 index 000000000..b8cc4c14b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_cert.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Devon Mar (@devon-mar) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_cert import ( + HashiVaultAuthMethodCert, +) + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultAuthMethodBase, + HashiVaultValueError, +) + + 
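The Azure authentication tests above mock ClientSecretCredential and ManagedIdentityCredential to check which credential class is selected and that the resulting token is passed as the 'jwt' login parameter. As a rough, illustrative sketch only (not the collection's actual implementation; the helper name build_azure_login_params and the example Vault URL are assumptions), the same flow outside the tests could look like:

    import hvac
    from azure.identity import ClientSecretCredential, ManagedIdentityCredential

    def build_azure_login_params(role, tenant_id=None, client_id=None, client_secret=None,
                                 resource='https://management.azure.com/', mount_point=None):
        # Service principal when a full tenant/client/secret triple is given,
        # otherwise a user-assigned (client_id set) or system-assigned managed identity.
        if tenant_id and client_id and client_secret:
            credential = ClientSecretCredential(tenant_id, client_id, client_secret)
        elif client_id:
            credential = ManagedIdentityCredential(client_id=client_id)
        else:
            credential = ManagedIdentityCredential()
        # The tests above expect the scope 'https://management.azure.com//.default'
        # (resource plus '/.default'), so the doubled slash is intentional.
        token = credential.get_token(resource + '/.default')
        params = {'role': role, 'jwt': token.token}
        if mount_point:
            params['mount_point'] = mount_point
        return params

    client = hvac.Client(url='https://vault.example.com:8200')
    client.auth.azure.login(**build_azure_login_params('vault-role', client_id='client-id'))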
+@pytest.fixture +def auth_cert(adapter, warner, deprecator): + return HashiVaultAuthMethodCert(adapter, warner, deprecator) + + +@pytest.fixture +def cert_login_response(fixture_loader): + return fixture_loader("cert_login_response.json") + + +class TestAuthCert(object): + + def test_auth_cert_is_auth_method_base(self, auth_cert): + assert isinstance(auth_cert, HashiVaultAuthMethodCert) + assert issubclass(HashiVaultAuthMethodCert, HashiVaultAuthMethodBase) + + def test_auth_cert_validate_direct(self, auth_cert, adapter): + adapter.set_option("cert_auth_public_key", "/fake/path") + adapter.set_option("cert_auth_private_key", "/fake/path") + + auth_cert.validate() + + @pytest.mark.parametrize("opt_patch", [ + {}, + {"cert_auth_public_key": ""}, + {"cert_auth_private_key": ""}, + {"mount_point": ""} + ]) + def test_auth_cert_validate_xfailures(self, auth_cert, adapter, opt_patch): + adapter.set_options(**opt_patch) + + with pytest.raises(HashiVaultValueError, match=r"Authentication method cert requires options .*? to be set, but these are missing:"): + auth_cert.validate() + + @pytest.mark.parametrize("use_token", [True, False], ids=lambda x: "use_token=%s" % x) + @pytest.mark.parametrize("mount_point", [None, "other"], ids=lambda x: "mount_point=%s" % x) + @pytest.mark.parametrize("role_id", [None, "cert"], ids=lambda x: "role_id=%s" % x) + def test_auth_cert_authenticate(self, auth_cert, client, adapter, mount_point, use_token, role_id, + cert_login_response): + adapter.set_option("cert_auth_public_key", "/fake/path") + adapter.set_option("cert_auth_private_key", "/fake/path") + adapter.set_option("role_id", role_id) + adapter.set_option("mount_point", mount_point) + + expected_login_params = { + "cert_pem": "/fake/path", + "key_pem": "/fake/path", + "use_token": use_token, + } + + if role_id: + expected_login_params["name"] = role_id + + if mount_point: + expected_login_params["mount_point"] = mount_point + + def _set_client_token(*args, **kwargs): + if kwargs['use_token']: + client.token = cert_login_response['auth']['client_token'] + return cert_login_response + + with mock.patch.object(client.auth.cert, "login", side_effect=_set_client_token) as cert_login: + response = auth_cert.authenticate(client, use_token=use_token) + cert_login.assert_called_once_with(**expected_login_params) + + assert response["auth"]["client_token"] == cert_login_response["auth"]["client_token"] + assert (client.token == cert_login_response["auth"]["client_token"]) is use_token diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_jwt.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_jwt.py new file mode 100644 index 000000000..f5971ae00 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_jwt.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_jwt import ( + HashiVaultAuthMethodJwt, +) + +from 
ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultAuthMethodBase, + HashiVaultValueError, +) + + +@pytest.fixture +def option_dict(): + return { + 'auth_method': 'jwt', + 'jwt': None, + 'role_id': None, + 'mount_point': None, + } + + +@pytest.fixture +def jwt(): + return 'opaque' + + +@pytest.fixture +def role_id(): + return 'fake-role' + + +@pytest.fixture +def auth_jwt(adapter, warner, deprecator): + return HashiVaultAuthMethodJwt(adapter, warner, deprecator) + + +@pytest.fixture +def jwt_login_response(fixture_loader): + return fixture_loader('jwt_login_response.json') + + +class TestAuthJwt(object): + + def test_auth_jwt_is_auth_method_base(self, auth_jwt): + assert isinstance(auth_jwt, HashiVaultAuthMethodJwt) + assert issubclass(HashiVaultAuthMethodJwt, HashiVaultAuthMethodBase) + + def test_auth_jwt_validate_direct(self, auth_jwt, adapter, jwt, role_id): + adapter.set_option('jwt', jwt) + adapter.set_option('role_id', role_id) + + auth_jwt.validate() + + @pytest.mark.parametrize('opt_patch', [ + {}, + {'role_id': 'role_id-only'}, + {'jwt': 'jwt-only'} + ]) + def test_auth_jwt_validate_xfailures(self, auth_jwt, adapter, opt_patch): + adapter.set_options(**opt_patch) + + with pytest.raises(HashiVaultValueError, match=r'Authentication method jwt requires options .*? to be set, but these are missing:'): + auth_jwt.validate() + + @pytest.mark.parametrize('use_token', [True, False], ids=lambda x: 'use_token=%s' % x) + @pytest.mark.parametrize('mount_point', [None, 'other'], ids=lambda x: 'mount_point=%s' % x) + def test_auth_jwt_authenticate(self, auth_jwt, client, adapter, jwt, role_id, mount_point, use_token, jwt_login_response): + adapter.set_option('jwt', jwt) + adapter.set_option('role_id', role_id) + adapter.set_option('mount_point', mount_point) + + expected_login_params = { + 'jwt': jwt, + 'role': role_id, + } + if mount_point: + expected_login_params['path'] = mount_point + + with mock.patch.object(client.auth.jwt, 'jwt_login', return_value=jwt_login_response) as jwt_login: + response = auth_jwt.authenticate(client, use_token=use_token) + jwt_login.assert_called_once_with(**expected_login_params) + + assert response['auth']['client_token'] == jwt_login_response['auth']['client_token'] + assert (client.token == jwt_login_response['auth']['client_token']) is use_token diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_ldap.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_ldap.py new file mode 100644 index 000000000..0e6d1007f --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_ldap.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_ldap import ( + HashiVaultAuthMethodLdap, +) + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultAuthMethodBase, + HashiVaultValueError, +) + + +@pytest.fixture +def option_dict(): + return { + 
'auth_method': 'ldap', + 'username': None, + 'password': None, + 'mount_point': None, + } + + +@pytest.fixture +def ldap_username(): + return 'ldapuser' + + +@pytest.fixture +def ldap_password(): + return 's3cret' + + +@pytest.fixture +def auth_ldap(adapter, warner, deprecator): + return HashiVaultAuthMethodLdap(adapter, warner, deprecator) + + +@pytest.fixture +def ldap_login_response(fixture_loader): + return fixture_loader('ldap_login_response.json') + + +class TestAuthLdap(object): + + def test_auth_ldap_is_auth_method_base(self, auth_ldap): + assert isinstance(auth_ldap, HashiVaultAuthMethodLdap) + assert issubclass(HashiVaultAuthMethodLdap, HashiVaultAuthMethodBase) + + @pytest.mark.parametrize('mount_point', [None, 'other'], ids=lambda x: 'mount_point=%s' % x) + def test_auth_ldap_validate(self, auth_ldap, adapter, ldap_username, ldap_password, mount_point): + adapter.set_options(username=ldap_username, password=ldap_password, mount_point=mount_point) + + auth_ldap.validate() + + @pytest.mark.parametrize('opt_patch', [ + {'username': 'user-only'}, + {'password': 'password-only'}, + ]) + def test_auth_ldap_validate_xfailures(self, auth_ldap, adapter, opt_patch): + adapter.set_options(**opt_patch) + + with pytest.raises(HashiVaultValueError, match=r'Authentication method ldap requires options .*? to be set, but these are missing:'): + auth_ldap.validate() + + @pytest.mark.parametrize('use_token', [True, False], ids=lambda x: 'use_token=%s' % x) + @pytest.mark.parametrize('mount_point', [None, 'other'], ids=lambda x: 'mount_point=%s' % x) + def test_auth_ldap_authenticate( + self, auth_ldap, client, adapter, ldap_password, ldap_username, mount_point, use_token, ldap_login_response + ): + adapter.set_option('username', ldap_username) + adapter.set_option('password', ldap_password) + adapter.set_option('mount_point', mount_point) + + expected_login_params = { + 'username': ldap_username, + 'password': ldap_password, + } + if mount_point: + expected_login_params['mount_point'] = mount_point + + auth_ldap.validate() + + def _set_client_token(*args, **kwargs): + if kwargs['use_token']: + client.token = ldap_login_response['auth']['client_token'] + return ldap_login_response + + with mock.patch.object(client.auth.ldap, 'login', side_effect=_set_client_token) as ldap_login: + response = auth_ldap.authenticate(client, use_token=use_token) + ldap_login.assert_called_once_with(use_token=use_token, **expected_login_params) + + assert response['auth']['client_token'] == ldap_login_response['auth']['client_token'] + assert (client.token == ldap_login_response['auth']['client_token']) is use_token diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_none.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_none.py new file mode 100644 index 000000000..1e3024f11 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_none.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ......plugins.module_utils._auth_method_none import HashiVaultAuthMethodNone +from ......plugins.module_utils._hashi_vault_common import 
HashiVaultAuthMethodBase + + +@pytest.fixture +def auth_none(adapter, warner, deprecator): + return HashiVaultAuthMethodNone(adapter, warner, deprecator) + + +class TestAuthNone(object): + + def test_auth_none_is_auth_method_base(self, auth_none): + assert issubclass(type(auth_none), HashiVaultAuthMethodBase) + + def test_auth_none_validate(self, auth_none): + auth_none.validate() + + @pytest.mark.parametrize('use_token', [True, False]) + def test_auth_none_authenticate(self, auth_none, client, use_token): + result = auth_none.authenticate(client, use_token=use_token) + + assert result is None + assert client.token is None diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_token.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_token.py new file mode 100644 index 000000000..d8a13435b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_token.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import pytest + +from ......tests.unit.compat import mock + +try: + import hvac +except ImportError: + # python 2.6, which isn't supported anyway + hvac = mock.MagicMock() + +from ......plugins.module_utils._auth_method_token import ( + HashiVaultAuthMethodToken, +) + +from ......plugins.module_utils._hashi_vault_common import ( + HashiVaultAuthMethodBase, + HashiVaultValueError, +) + + +@pytest.fixture +def option_dict(): + return { + 'auth_method': 'fake', + 'token': None, + 'token_path': None, + 'token_file': '.vault-token', + 'token_validate': True, + } + + +@pytest.fixture +def token(): + return 'opaque' + + +@pytest.fixture +def auth_token(adapter, warner, deprecator): + return HashiVaultAuthMethodToken(adapter, warner, deprecator) + + +@pytest.fixture(params=['lookup-self_with_meta.json', 'lookup-self_without_meta.json']) +def lookup_self_response(fixture_loader, request): + return fixture_loader(request.param) + + +@pytest.fixture +def token_file_path(fixture_loader): + return fixture_loader('vault-token', parse='path') + + +@pytest.fixture +def token_file_content(fixture_loader): + return fixture_loader('vault-token', parse='raw').strip() + + +@pytest.fixture(params=[hvac.exceptions.InvalidRequest(), hvac.exceptions.Forbidden(), hvac.exceptions.InvalidPath()]) +def validation_failure(request): + return request.param + + +class TestAuthToken(object): + + def test_auth_token_is_auth_method_base(self, auth_token): + assert isinstance(auth_token, HashiVaultAuthMethodToken) + assert issubclass(HashiVaultAuthMethodToken, HashiVaultAuthMethodBase) + + def test_simulate_login_response(self, auth_token, token): + response = auth_token._simulate_login_response(token) + expected = { + 'auth': { + 'client_token': token + } + } + + assert response == expected + + def test_simulate_login_response_with_lookup(self, auth_token, token, lookup_self_response): + response = auth_token._simulate_login_response(token, lookup_self_response) + + assert 'auth' in response + assert response['auth']['client_token'] == token + + if 'meta' not in lookup_self_response['data']: + return + assert 'meta' not in response['auth'] + assert 
lookup_self_response['data']['meta'] == response['auth']['metadata'] + + def test_auth_token_validate_direct(self, auth_token, adapter, token): + adapter.set_option('token', token) + + auth_token.validate() + + assert adapter.get_option('token') == token + + def test_auth_token_validate_by_path(self, auth_token, adapter, token_file_path, token_file_content): + head, tail = os.path.split(token_file_path) + adapter.set_option('token_path', head) + adapter.set_option('token_file', tail) + + auth_token.validate() + + assert adapter.get_option('token') == token_file_content + + @pytest.mark.parametrize('opt_patch', [ + {}, + {'token_path': '/tmp', 'token_file': '__fake_no_file'}, + ]) + def test_auth_token_validate_xfailures(self, auth_token, adapter, opt_patch): + adapter.set_options(**opt_patch) + + with pytest.raises(HashiVaultValueError, match=r'No Vault Token specified or discovered'): + auth_token.validate() + + def test_auth_token_file_is_directory(self, auth_token, adapter, tmp_path): + # ensure that a token_file that exists but is a directory is treated the same as it not being found + # see also: https://github.com/ansible-collections/community.hashi_vault/issues/152 + adapter.set_options(token_path=str(tmp_path.parent), token_file=str(tmp_path)) + + with pytest.raises(HashiVaultValueError, match=r"The Vault token file '[^']+' was found but is not a file."): + auth_token.validate() + + @pytest.mark.parametrize('use_token', [True, False], ids=lambda x: 'use_token=%s' % x) + @pytest.mark.parametrize('lookup_self', [True, False], ids=lambda x: 'lookup_self=%s' % x) + @pytest.mark.parametrize('token_validate', [True, False], ids=lambda x: 'token_validate=%s' % x) + def test_auth_token_authenticate(self, auth_token, client, adapter, token, use_token, token_validate, lookup_self, lookup_self_response): + adapter.set_option('token', token) + adapter.set_option('token_validate', token_validate) + + expected_lookup_value = lookup_self_response if use_token and (lookup_self or token_validate) else None + + with mock.patch.object(auth_token, '_simulate_login_response', wraps=auth_token._simulate_login_response) as sim_login: + with mock.patch.object(client.auth.token, 'lookup_self', return_value=lookup_self_response): + response = auth_token.authenticate(client, use_token=use_token, lookup_self=lookup_self) + + sim_login.assert_called_once_with(token, expected_lookup_value) + + assert response['auth']['client_token'] == token + assert (client.token == token) is use_token + + def test_auth_token_authenticate_success_on_no_validate(self, auth_token, adapter, client, token, validation_failure): + adapter.set_option('token', token) + adapter.set_option('token_validate', False) + + raiser = mock.Mock() + raiser.side_effect = validation_failure + + with mock.patch.object(auth_token, '_simulate_login_response', wraps=auth_token._simulate_login_response) as sim_login: + with mock.patch.object(client.auth.token, 'lookup_self', raiser): + response = auth_token.authenticate(client, use_token=True, lookup_self=True) + + sim_login.assert_called_once_with(token, None) + + assert response['auth']['client_token'] == token + assert client.token == token + + def test_auth_token_authenticate_failed_validation(self, auth_token, adapter, client, token, validation_failure): + adapter.set_option('token', token) + adapter.set_option('token_validate', True) + + raiser = mock.Mock() + raiser.side_effect = validation_failure + + with pytest.raises(HashiVaultValueError, match=r'Invalid Vault Token Specified'): + with 
mock.patch.object(client.auth.token, 'lookup_self', raiser): + auth_token.authenticate(client, use_token=True, lookup_self=False) + + @pytest.mark.parametrize('exc', [AttributeError, NotImplementedError]) + def test_auth_token_authenticate_old_lookup_self(self, auth_token, adapter, client, token, exc): + adapter.set_option('token', token) + + with mock.patch.object(client, 'lookup_token') as legacy_lookup: + with mock.patch.object(client.auth.token, 'lookup_self', side_effect=exc) as lookup: + auth_token.authenticate(client, use_token=True, lookup_self=True) + + legacy_lookup.assert_called_once_with() + lookup.assert_called_once_with() diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_userpass.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_userpass.py new file mode 100644 index 000000000..27ffafb92 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_auth_userpass.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_userpass import ( + HashiVaultAuthMethodUserpass, +) + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultAuthMethodBase, + HashiVaultValueError, +) + + +@pytest.fixture +def option_dict(): + return { + 'auth_method': 'userpass', + 'username': None, + 'password': None, + 'mount_point': None, + } + + +@pytest.fixture +def userpass_password(): + return 'opaque' + + +@pytest.fixture +def userpass_username(): + return 'fake-user' + + +@pytest.fixture +def auth_userpass(adapter, warner, deprecator): + return HashiVaultAuthMethodUserpass(adapter, warner, deprecator) + + +@pytest.fixture +def userpass_login_response(fixture_loader): + return fixture_loader('userpass_login_response.json') + + +class TestAuthUserpass(object): + + def test_auth_userpass_is_auth_method_base(self, auth_userpass): + assert isinstance(auth_userpass, HashiVaultAuthMethodUserpass) + assert issubclass(HashiVaultAuthMethodUserpass, HashiVaultAuthMethodBase) + + def test_auth_userpass_validate_direct(self, auth_userpass, adapter, userpass_username, userpass_password): + adapter.set_option('username', userpass_username) + adapter.set_option('password', userpass_password) + + auth_userpass.validate() + + @pytest.mark.parametrize('opt_patch', [ + {'username': 'user-only'}, + {'password': 'password-only'}, + ]) + def test_auth_userpass_validate_xfailures(self, auth_userpass, adapter, opt_patch): + adapter.set_options(**opt_patch) + + with pytest.raises(HashiVaultValueError, match=r'Authentication method userpass requires options .*? 
to be set, but these are missing:'): + auth_userpass.validate() + + @pytest.mark.parametrize('use_token', [True, False], ids=lambda x: 'use_token=%s' % x) + @pytest.mark.parametrize('mount_point', [None, 'other'], ids=lambda x: 'mount_point=%s' % x) + def test_auth_userpass_authenticate( + self, auth_userpass, client, adapter, userpass_password, userpass_username, mount_point, use_token, userpass_login_response + ): + adapter.set_option('username', userpass_username) + adapter.set_option('password', userpass_password) + adapter.set_option('mount_point', mount_point) + + expected_login_params = { + 'username': userpass_username, + 'password': userpass_password, + } + if mount_point: + expected_login_params['mount_point'] = mount_point + + def _set_client_token(*args, **kwargs): + return userpass_login_response + + with mock.patch.object(client.auth.userpass, 'login', side_effect=_set_client_token) as userpass_login: + response = auth_userpass.authenticate(client, use_token=use_token) + userpass_login.assert_called_once_with(**expected_login_params) + + assert response['auth']['client_token'] == userpass_login_response['auth']['client_token'] + assert (client.token == userpass_login_response['auth']['client_token']) is use_token diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_hashi_vault_auth_method_base.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_hashi_vault_auth_method_base.py new file mode 100644 index 000000000..828cbdefc --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_hashi_vault_auth_method_base.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultAuthMethodBase, + HashiVaultOptionGroupBase, + HashiVaultValueError, + _stringify, +) + + +@pytest.fixture +def auth_base(adapter, warner, deprecator): + return HashiVaultAuthMethodBase(adapter, warner, deprecator) + + +class TestHashiVaultAuthMethodBase(object): + + def test_auth_method_is_option_group_base(self, fake_auth_class): + assert issubclass(type(fake_auth_class), HashiVaultOptionGroupBase) + + def test_base_validate_not_implemented(self, auth_base): + with pytest.raises(NotImplementedError): + auth_base.validate() + + def test_base_authenticate_not_implemented(self, auth_base, client): + with pytest.raises(NotImplementedError): + auth_base.authenticate(client) + + @pytest.mark.parametrize('options,required', [ + ({}, []), + ({'a': 1, 'b': '2'}, ['b']), + ({'a': 1, 'b': '2'}, ['a', 'b']), + ({'a': 1, 'b': '2', 'c': 3.0}, ['a', 'c']) + ]) + def test_validate_by_required_fields_success(self, auth_base, adapter, options, required): + adapter.set_options(**options) + + auth_base.validate_by_required_fields(*required) + + @pytest.mark.parametrize('options,required', [ + ({}, ['a']), + ({'a': 1, 'b': '2'}, ['c']), + ({'a': 1, 'b': '2'}, ['a', 'c']), + ({'a': 1, 'b': '2', 'c': 3.0}, ['a', 'c', 'd']) + ]) + def test_validate_by_required_fields_failure(self, 
fake_auth_class, adapter, options, required): + adapter.set_options(**options) + + with pytest.raises(HashiVaultValueError): + fake_auth_class.validate_by_required_fields(*required) + + def test_warning_callback(self, auth_base, warner): + msg = 'warning msg' + + auth_base.warn(msg) + + warner.assert_called_once_with(msg) + + @pytest.mark.parametrize('version', [None, '0.99.7']) + @pytest.mark.parametrize('date', [None, '2022']) + @pytest.mark.parametrize('collection_name', [None, 'ns.col']) + def test_deprecate_callback(self, auth_base, deprecator, version, date, collection_name): + msg = 'warning msg' + + auth_base.deprecate(msg, version, date, collection_name) + + deprecator.assert_called_once_with(msg, version=version, date=date, collection_name=collection_name) + + def test_has_stringify(self, auth_base): + v = 'X' + wrapper = mock.Mock(wraps=_stringify) + with mock.patch('ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common._stringify', wrapper): + r = auth_base._stringify(v) + + wrapper.assert_called_once_with(v) + assert r == v diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_hashi_vault_authenticator.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_hashi_vault_authenticator.py new file mode 100644 index 000000000..4853d2c76 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/authentication/test_hashi_vault_authenticator.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ......plugins.module_utils._authenticator import HashiVaultAuthenticator + + +@pytest.fixture +def authenticator(fake_auth_class, adapter, warner, deprecator): + a = HashiVaultAuthenticator(adapter, warner, deprecator) + a._selector.update({fake_auth_class.NAME: fake_auth_class}) + + return a + + +class TestHashiVaultAuthenticator(object): + def test_method_validate_is_called(self, authenticator, fake_auth_class): + authenticator.validate() + + fake_auth_class.validate.assert_called_once() + + def test_validate_not_implemented(self, authenticator, fake_auth_class): + with pytest.raises(NotImplementedError): + authenticator.validate(method='missing') + + fake_auth_class.validate.assert_not_called() + + @pytest.mark.parametrize('args', [ + [], + ['one'], + ['one', 2, 'three'], + ]) + @pytest.mark.parametrize('kwargs', [ + {}, + {'one': 1}, + {'one': '1', 'two': 2}, + ]) + def test_method_authenticate_is_called(self, authenticator, fake_auth_class, args, kwargs): + authenticator.authenticate(*args, **kwargs) + + fake_auth_class.authenticate.assert_called_once_with(*args, **kwargs) + + def test_authenticate_not_implemented(self, authenticator, fake_auth_class): + with pytest.raises(NotImplementedError): + authenticator.validate(method='missing') + + fake_auth_class.authenticate.assert_not_called() + + def test_get_method_object_explicit(self, authenticator): + for auth_method, obj in authenticator._selector.items(): + assert authenticator._get_method_object(method=auth_method) == obj + + def test_get_method_object_missing(self, authenticator): + with pytest.raises(NotImplementedError, match=r"auth method 'missing' is not 
implemented in HashiVaultAuthenticator"): + authenticator._get_method_object(method='missing') + + def test_get_method_object_implicit(self, authenticator, adapter, fake_auth_class): + adapter.set_option('auth_method', fake_auth_class.NAME) + + obj = authenticator._get_method_object() + + assert isinstance(obj, type(fake_auth_class)) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/option_adapter/conftest.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/option_adapter/conftest.py new file mode 100644 index 000000000..5357addf4 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/option_adapter/conftest.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +# this file must define the "adapter" fixture at a minimum, +# and anything else that it needs or depends on that isn't already defined in in the test files themselves. + +# Keep in mind that this one is for module_utils and so it cannot depend on or import any controller-side code. + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultOptionAdapter + +import pytest + + +class FakeAnsibleModule: + '''HashiVaultOptionAdapter.from_ansible_module() only cares about the AnsibleModule.params dict''' + + def __init__(self, params): + self.params = params + + +@pytest.fixture +def ansible_module(sample_dict): + return FakeAnsibleModule(sample_dict) + + +@pytest.fixture +def adapter_from_ansible_module(ansible_module): + def _create_adapter_from_ansible_module(): + return HashiVaultOptionAdapter.from_ansible_module(ansible_module) + + return _create_adapter_from_ansible_module + + +@pytest.fixture(params=['dict', 'dict_defaults', 'ansible_module']) +def adapter(request, adapter_from_dict, adapter_from_dict_defaults, adapter_from_ansible_module): + return { + 'dict': adapter_from_dict, + 'dict_defaults': adapter_from_dict_defaults, + 'ansible_module': adapter_from_ansible_module, + }[request.param]() diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/option_adapter/test_hashi_vault_option_adapter.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/option_adapter/test_hashi_vault_option_adapter.py new file mode 100644 index 000000000..d3e205d68 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/option_adapter/test_hashi_vault_option_adapter.py @@ -0,0 +1,211 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultOptionAdapter + + +SAMPLE_DICT = { + 'key1': 'val1', + 'key2': 2, + 'key3': 'three', + 'key4': 'iiii', + 'key5': None, +} + +SAMPLE_KEYS = sorted(list(SAMPLE_DICT.keys())) + +MISSING_KEYS = ['no', 'nein', 'iie'] + + +class SentinelMarker(): + pass + + +MARKER = SentinelMarker() + + 
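The SentinelMarker/MARKER pair defined above is the usual sentinel-object trick for telling "option not present" apart from "option present but None" (note that SAMPLE_DICT deliberately contains key5=None). A minimal, self-contained sketch of the pattern, independent of HashiVaultOptionAdapter and using an assumed helper name, looks like:

    # A unique object lets a getter distinguish "key absent" from "value is None".
    _SENTINEL = object()

    def get_option_default(options, key, default=None):
        value = options.get(key, _SENTINEL)
        return default if value is _SENTINEL else value

    opts = {'key5': None}
    assert get_option_default(opts, 'key5', 'fallback') is None        # present, value is None
    assert get_option_default(opts, 'nein', 'fallback') == 'fallback'  # genuinely missing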
+@pytest.fixture() +def sample_dict(): + return SAMPLE_DICT.copy() + + +@pytest.fixture +def adapter_from_dict(sample_dict): + def _create_adapter_from_dict(): + return HashiVaultOptionAdapter.from_dict(sample_dict) + + return _create_adapter_from_dict + + +@pytest.fixture +def adapter_from_dict_defaults(sample_dict): + # the point of this one is to test the "default" methods provided by the adapter + # for everything except getter and setter, so we only supply those two required methods + def _create_adapter_from_dict_defaults(): + return HashiVaultOptionAdapter(getter=sample_dict.__getitem__, setter=sample_dict.__setitem__) + + return _create_adapter_from_dict_defaults + + +@pytest.fixture +def filter_all(): + return lambda k, v: True + + +@pytest.fixture +def filter_none(): + return lambda k, v: False + + +@pytest.fixture +def filter_value_not_none(): + return lambda k, v: v is not None + + +@pytest.fixture +def filter_key_in_range(): + return lambda k, v: k in SAMPLE_KEYS[1:3] + + +class TestHashiVaultOptionAdapter(object): + + @pytest.mark.parametrize('option', SAMPLE_KEYS) + def test_get_option_succeeds(self, adapter, option): + value = adapter.get_option(option) + + assert value == SAMPLE_DICT[option] + + @pytest.mark.parametrize('option', MISSING_KEYS) + def test_get_option_missing_raises(self, adapter, option): + with pytest.raises(KeyError): + adapter.get_option(option) + + @pytest.mark.parametrize('option', SAMPLE_KEYS) + def test_get_option_default_succeeds(self, adapter, option): + value = adapter.get_option_default(option, MARKER) + + assert value == SAMPLE_DICT[option] + + @pytest.mark.parametrize('option', MISSING_KEYS) + def test_get_option_default_missing_returns_default(self, adapter, option): + value = adapter.get_option_default(option, MARKER) + + assert isinstance(value, SentinelMarker) + + @pytest.mark.parametrize('option,expected', [(o, False) for o in MISSING_KEYS] + [(o, True) for o in SAMPLE_KEYS]) + def test_has_option(self, adapter, option, expected): + assert adapter.has_option(option) == expected + + @pytest.mark.parametrize('value', ['__VALUE']) + @pytest.mark.parametrize('option', (SAMPLE_KEYS + MISSING_KEYS)) + def test_set_option(self, adapter, option, value, sample_dict): + adapter.set_option(option, value) + + # first check the underlying data, then ensure the adapter refelcts the change too + assert sample_dict[option] == value + assert adapter.get_option(option) == value + + @pytest.mark.parametrize('default', [MARKER]) + @pytest.mark.parametrize('option,expected', [(o, SAMPLE_DICT[o]) for o in SAMPLE_KEYS] + [(o, MARKER) for o in MISSING_KEYS]) + def test_set_option_default(self, adapter, option, default, expected, sample_dict): + value = adapter.set_option_default(option, default) + + # check return data, underlying data structure, and adapter retrieval + assert value == expected + assert sample_dict[option] == expected + assert adapter.get_option(option) == expected + + @pytest.mark.parametrize('options', [[SAMPLE_KEYS[0], MISSING_KEYS[0]]]) + def test_set_options(self, adapter, options, sample_dict): + update = dict([(o, '__VALUE_%i' % i) for i, o in enumerate(options)]) + + adapter.set_options(**update) + + for k in SAMPLE_KEYS: + expected = update[k] if k in update else SAMPLE_DICT[k] + assert sample_dict[k] == expected + assert adapter.get_option(k) == expected + + for k in MISSING_KEYS: + if k in update: + assert sample_dict[k] == update[k] + assert adapter.get_option(k) == update[k] + else: + assert k not in sample_dict + assert not 
adapter.has_option(k) + + @pytest.mark.parametrize('options', [[SAMPLE_KEYS[0], MISSING_KEYS[0]]]) + def test_get_options_mixed(self, adapter, options): + with pytest.raises(KeyError): + adapter.get_options(*options) + + @pytest.mark.parametrize('options', [MISSING_KEYS[0:2]]) + def test_get_options_missing(self, adapter, options): + with pytest.raises(KeyError): + adapter.get_options(*options) + + @pytest.mark.parametrize('options', [SAMPLE_KEYS[0:2]]) + def test_get_options_exists(self, adapter, options): + expected = dict([(k, SAMPLE_DICT[k]) for k in options]) + + result = adapter.get_options(*options) + + assert result == expected + + @pytest.mark.parametrize('options', [[SAMPLE_KEYS[0], MISSING_KEYS[0]]]) + def test_get_filtered_options_mixed(self, adapter, options, filter_all): + with pytest.raises(KeyError): + adapter.get_filtered_options(filter_all, *options) + + @pytest.mark.parametrize('options', [MISSING_KEYS[0:2]]) + def test_get_filtered_options_missing(self, adapter, options, filter_all): + with pytest.raises(KeyError): + adapter.get_filtered_options(filter_all, *options) + + @pytest.mark.parametrize('options', [SAMPLE_KEYS]) + def test_get_filtered_options_all(self, adapter, options, filter_all): + expected = dict([(k, SAMPLE_DICT[k]) for k in options]) + + result = adapter.get_filtered_options(filter_all, *options) + + assert result == expected + assert result == adapter.get_options(*options) + + @pytest.mark.parametrize('options', [SAMPLE_KEYS]) + def test_get_filtered_options_none(self, adapter, options, filter_none): + expected = {} + + result = adapter.get_filtered_options(filter_none, *options) + + assert result == expected + + @pytest.mark.parametrize('options', [SAMPLE_KEYS]) + def test_get_filtered_options_by_value(self, adapter, options, filter_value_not_none): + expected = dict([(k, SAMPLE_DICT[k]) for k in options if SAMPLE_DICT[k] is not None]) + + result = adapter.get_filtered_options(filter_value_not_none, *options) + + assert result == expected + + @pytest.mark.parametrize('options', [SAMPLE_KEYS]) + def test_get_filtered_options_by_key(self, adapter, options, filter_key_in_range): + expected = dict([(k, SAMPLE_DICT[k]) for k in options if k in SAMPLE_KEYS[1:3]]) + + result = adapter.get_filtered_options(filter_key_in_range, *options) + + assert result == expected + + @pytest.mark.parametrize('options', [SAMPLE_KEYS]) + def test_get_filled_options(self, adapter, options): + expected = dict([(k, SAMPLE_DICT[k]) for k in options if SAMPLE_DICT[k] is not None]) + + result = adapter.get_filled_options(*options) + + assert result == expected diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/test_hashi_vault_connection_options.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/test_hashi_vault_connection_options.py new file mode 100644 index 000000000..8766388e9 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/test_hashi_vault_connection_options.py @@ -0,0 +1,272 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import pytest + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from 
ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultOptionGroupBase, + HashiVaultOptionAdapter, + HashiVaultValueError, +) + +from ansible_collections.community.hashi_vault.plugins.module_utils._connection_options import HashiVaultConnectionOptions + +from requests import Session + + +CONNECTION_OPTIONS = { + 'url': 'url-is-required', + 'proxies': None, + 'namespace': None, + 'validate_certs': None, + 'ca_cert': None, + 'timeout': None, + 'retries': None, + 'retry_action': 'warn', +} + + +@pytest.fixture +def predefined_options(): + return CONNECTION_OPTIONS.copy() + + +@pytest.fixture +def adapter(predefined_options): + return HashiVaultOptionAdapter.from_dict(predefined_options) + + +@pytest.fixture +def retry_callback_generator(): + def _cb(retry_action): + pass + return _cb + + +@pytest.fixture +def connection_options(adapter, retry_callback_generator): + return HashiVaultConnectionOptions(adapter, retry_callback_generator) + + +class TestHashiVaultConnectionOptions(object): + def test_connection_options_is_option_group(self, connection_options): + assert issubclass(type(connection_options), HashiVaultOptionGroupBase) + + # _boolean_or_cacert tests + # this method is the intersection of the validate_certs and ca_cert parameter + # along with the VAULT_SKIP_VERIFY environment variable (see the function defintion). + # The result is either a boolean, or a string, to be passed to the hvac client's + # verify parameter. + + @pytest.mark.parametrize( + 'optpatch,envpatch,expected', + [ + ({}, {}, True), + ({}, {'VAULT_SKIP_VERIFY': 'true'}, False), + ({}, {'VAULT_SKIP_VERIFY': 'false'}, True), + ({}, {'VAULT_SKIP_VERIFY': 'invalid'}, True), + ({'validate_certs': True}, {}, True), + ({'validate_certs': True}, {'VAULT_SKIP_VERIFY': 'false'}, True), + ({'validate_certs': True}, {'VAULT_SKIP_VERIFY': 'true'}, True), + ({'validate_certs': True}, {'VAULT_SKIP_VERIFY': 'invalid'}, True), + ({'validate_certs': False}, {}, False), + ({'validate_certs': False}, {'VAULT_SKIP_VERIFY': 'false'}, False), + ({'validate_certs': False}, {'VAULT_SKIP_VERIFY': 'true'}, False), + ({'validate_certs': False}, {'VAULT_SKIP_VERIFY': 'invalid'}, False), + ({'ca_cert': '/tmp/fake'}, {}, '/tmp/fake'), + ({'ca_cert': '/tmp/fake'}, {'VAULT_SKIP_VERIFY': 'true'}, False), + ({'ca_cert': '/tmp/fake'}, {'VAULT_SKIP_VERIFY': 'false'}, '/tmp/fake'), + ({'ca_cert': '/tmp/fake'}, {'VAULT_SKIP_VERIFY': 'invalid'}, '/tmp/fake'), + ({'ca_cert': '/tmp/fake', 'validate_certs': True}, {}, '/tmp/fake'), + ({'ca_cert': '/tmp/fake', 'validate_certs': True}, {'VAULT_SKIP_VERIFY': 'false'}, '/tmp/fake'), + ({'ca_cert': '/tmp/fake', 'validate_certs': True}, {'VAULT_SKIP_VERIFY': 'true'}, '/tmp/fake'), + ({'ca_cert': '/tmp/fake', 'validate_certs': True}, {'VAULT_SKIP_VERIFY': 'invalid'}, '/tmp/fake'), + ({'ca_cert': '/tmp/fake', 'validate_certs': False}, {}, False), + ({'ca_cert': '/tmp/fake', 'validate_certs': False}, {'VAULT_SKIP_VERIFY': 'false'}, False), + ({'ca_cert': '/tmp/fake', 'validate_certs': False}, {'VAULT_SKIP_VERIFY': 'true'}, False), + ({'ca_cert': '/tmp/fake', 'validate_certs': False}, {'VAULT_SKIP_VERIFY': 'invalid'}, False), + ] + ) + def test_boolean_or_cacert(self, connection_options, predefined_options, adapter, optpatch, envpatch, expected): + adapter.set_options(**optpatch) + + with mock.patch.dict(os.environ, envpatch): + connection_options._boolean_or_cacert() + + assert predefined_options['ca_cert'] == expected + + # _process_option_proxies + # proxies can be 
specified as a dictionary where key is protocol/scheme + # and value is the proxy address. A dictionary can also be supplied as a string + # representation of a dictionary in JSON format. + # If a string is supplied that cannot be interpreted as a JSON dictionary, then it + # is assumed to be a proxy address, and will be used as proxy for both the + # http and https protocols. + + @pytest.mark.parametrize( + 'optproxies,expected', + [ + (None, None), + ('socks://thecat', {'http': 'socks://thecat', 'https': 'socks://thecat'}), + ('{"http": "gopher://it"}', {'http': 'gopher://it'}), + ({'https': "smtp://mail.aol.com"}, {'https': "smtp://mail.aol.com"}), + ({'protoa': 'proxya', 'protob': 'proxyb', 'protoc': 'proxyc'}, {'protoa': 'proxya', 'protob': 'proxyb', 'protoc': 'proxyc'}), + ('{"protoa": "proxya", "protob": "proxyb", "protoc": "proxyc"}', {'protoa': 'proxya', 'protob': 'proxyb', 'protoc': 'proxyc'}), + ('{"protoa":"proxya","protob":"proxyb","protoc":"proxyc"}', {'protoa': 'proxya', 'protob': 'proxyb', 'protoc': 'proxyc'}), + ] + ) + def test_process_option_proxies(self, connection_options, predefined_options, adapter, optproxies, expected): + adapter.set_option('proxies', optproxies) + + connection_options._process_option_proxies() + + assert predefined_options['proxies'] == expected + + # _process_option_retries + # can be specified as a positive int or a dict + # (or any string that can be interpreted as one of those) + + @pytest.mark.parametrize('opt_retries', ['plz retry', ('1', '1'), [True], -1, 1.0]) + def test_process_option_retries_invalid(self, connection_options, predefined_options, adapter, opt_retries): + adapter.set_option('retries', opt_retries) + + with pytest.raises((TypeError, ValueError)): + connection_options._process_option_retries() + + @pytest.mark.parametrize('opt_retries', [None, 0, '0']) + def test_process_option_retries_none_result(self, connection_options, predefined_options, adapter, opt_retries): + adapter.set_option('retries', opt_retries) + + connection_options._process_option_retries() + + assert predefined_options['retries'] is None + + @pytest.mark.parametrize('opt_retries', [1, '1', 10, '30']) + def test_process_option_retries_from_number(self, connection_options, predefined_options, adapter, opt_retries): + expected = connection_options._RETRIES_DEFAULT_PARAMS.copy() + expected['total'] = int(float(opt_retries)) + + adapter.set_option('retries', opt_retries) + + connection_options._process_option_retries() + + assert predefined_options['retries'] == expected + + @pytest.mark.parametrize( + 'opt_retries,expected', + [ + ({}, {}), + ('{}', {}), + ({'total': 5}, {'total': 5}), + ('{"total": 9}', {'total': 9}), + ] + ) + def test_process_option_retries_from_dict(self, connection_options, predefined_options, adapter, opt_retries, expected): + adapter.set_option('retries', opt_retries) + + connection_options._process_option_retries() + + assert predefined_options['retries'] == expected + + # process_connection_options + # this is the public function of the class meant to ensure all option processing is complete + + def test_process_connection_options(self, mocker, connection_options, adapter): + # mock the internal methods we expect to be called + f_process_late_binding_env_vars = mocker.patch.object(connection_options, 'process_late_binding_env_vars') + f_boolean_or_cacert = mocker.patch.object(connection_options, '_boolean_or_cacert') + f_process_option_proxies = mocker.patch.object(connection_options, '_process_option_proxies') + 
f_process_option_retries = mocker.patch.object(connection_options, '_process_option_retries') + + # mock the adapter itself, so we can spy on adapter interactions + # since we're mocking out the methods we expect to call, we shouldn't see any + mock_adapter = mock.create_autospec(adapter) + connection_options._options = mock_adapter + + connection_options.process_connection_options() + + # assert the expected methods have been called once + f_process_late_binding_env_vars.assert_called_once() + f_boolean_or_cacert.assert_called_once() + f_process_option_proxies.assert_called_once() + f_process_option_retries.assert_called_once() + + # aseert that the adapter had no interactions (because we mocked out everything we knew about) + # the intention here is to catch a situation where process_connection_options has been modified + # to do some new behavior, without modifying this test. + assert mock_adapter.method_calls == [], 'Unexpected adapter interaction: %r' % mock_adapter.method_calls + + # get_hvac_connection_options + # gets the dict of params to pass to the hvac Client constructor + # based on the connection options we have in Ansible + + @pytest.mark.parametrize('opt_ca_cert', [None, '/tmp/fake']) + @pytest.mark.parametrize('opt_validate_certs', [None, True, False]) + @pytest.mark.parametrize('opt_namespace', [None, 'namepsace1']) + @pytest.mark.parametrize('opt_timeout', [None, 30]) + @pytest.mark.parametrize('opt_retries', [None, 0, 2, {'total': 3}, '{"total": 3}']) + @pytest.mark.parametrize('opt_retry_action', ['ignore', 'warn']) + @pytest.mark.parametrize('opt_proxies', [ + None, 'socks://noshow', '{"https": "https://prox", "http": "http://other"}', {'http': 'socks://one', 'https': 'socks://two'} + ]) + def test_get_hvac_connection_options( + self, connection_options, predefined_options, adapter, + opt_ca_cert, opt_validate_certs, opt_proxies, opt_namespace, opt_timeout, opt_retries, opt_retry_action, + ): + + option_set = { + 'ca_cert': opt_ca_cert, + 'validate_certs': opt_validate_certs, + 'proxies': opt_proxies, + 'namespace': opt_namespace, + 'timeout': opt_timeout, + 'retries': opt_retries, + 'retry_action': opt_retry_action, + } + adapter.set_options(**option_set) + + connection_options.process_connection_options() + opts = connection_options.get_hvac_connection_options() + + # these two will get swallowed up to become 'verify' + assert 'validate_certs' not in opts + assert 'ca_cert' not in opts + + # retry_action is used/removed in the configuration of retries (session) + assert 'retry_action' not in opts + + # retries will become session + assert 'retries' not in opts + + # these should always be returned + assert 'url' in opts and opts['url'] == predefined_options['url'] + assert 'verify' in opts and opts['verify'] == predefined_options['ca_cert'] + + # these are optional + assert 'proxies' not in opts or opts['proxies'] == predefined_options['proxies'] + assert 'namespace' not in opts or opts['namespace'] == predefined_options['namespace'] + assert 'timeout' not in opts or opts['timeout'] == predefined_options['timeout'] + assert 'session' not in opts or isinstance(opts['session'], Session) + + @mock.patch('ansible_collections.community.hashi_vault.plugins.module_utils._connection_options.HAS_RETRIES', new=False) + def test_get_hvac_connection_options_retry_not_available(self, connection_options, adapter): + adapter.set_option('retries', 2) + + connection_options.process_connection_options() + + with pytest.raises(NotImplementedError): + 
connection_options.get_hvac_connection_options() + + def test_url_is_required(self, connection_options, adapter): + adapter.set_option('url', None) + + with pytest.raises(HashiVaultValueError, match=r'Required option url was not set'): + connection_options.process_connection_options() diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/test_hashi_vault_helper.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/test_hashi_vault_helper.py new file mode 100644 index 000000000..6a5c6002b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/test_hashi_vault_helper.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import pytest + +from .....tests.unit.compat import mock +from .....plugins.module_utils._hashi_vault_common import ( + HashiVaultHelper, + _stringify, +) + + +@pytest.fixture +def hashi_vault_helper(): + return HashiVaultHelper() + + +@pytest.fixture +def vault_token(): + return 'fake123' + + +@pytest.fixture +def vault_token_via_env(vault_token): + with mock.patch.dict(os.environ, {'VAULT_TOKEN': vault_token}): + yield + + +class TestHashiVaultHelper(object): + + def test_get_vault_client_without_logout_explicit_token(self, hashi_vault_helper, vault_token): + client = hashi_vault_helper.get_vault_client(token=vault_token) + + assert client.token == vault_token + + def test_get_vault_client_without_logout_implicit_token(self, hashi_vault_helper, vault_token, vault_token_via_env): + client = hashi_vault_helper.get_vault_client(hashi_vault_logout_inferred_token=False) + + assert client.token == vault_token + + def test_get_vault_client_with_logout_implicit_token(self, hashi_vault_helper, vault_token_via_env): + client = hashi_vault_helper.get_vault_client(hashi_vault_logout_inferred_token=True) + + assert client.token is None + + def test_has_stringify(self, hashi_vault_helper): + v = 'X' + wrapper = mock.Mock(wraps=_stringify) + with mock.patch('ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common._stringify', wrapper): + r = hashi_vault_helper._stringify(v) + + wrapper.assert_called_once_with(v) + assert r == v, '%r != %r' % (r, v) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/test_hashi_vault_option_group_base.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/test_hashi_vault_option_group_base.py new file mode 100644 index 000000000..352428cc6 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/module_utils/test_hashi_vault_option_group_base.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import pytest + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import ( + HashiVaultOptionGroupBase, + HashiVaultOptionAdapter, +) + + 
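# Illustrative sketch only, not the collection's implementation: the late-binding
# behavior that the fixtures and test below encode for process_late_binding_env_vars.
# An option is filled in only when it is already present and currently unset; the first
# listed environment variable that is set wins, otherwise the definition's default
# (if any) is used. Relies on the 'os' import above; the helper name is hypothetical.
def _example_late_binding(options, low_preference_defs):
    for name, definition in low_preference_defs.items():
        if name not in options or options[name] is not None:
            # absent or already-set options are left alone
            continue
        for env in definition.get('env', []):
            if env in os.environ:
                options[name] = os.environ[env]
                break
        else:
            # no listed env var was set; fall back to the default, if there is one
            if 'default' in definition:
                options[name] = definition['default']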
+PREREAD_OPTIONS = { + 'opt1': 'val1', + 'opt2': None, + 'opt3': 'val3', + 'opt4': None, + # no opt5 + 'opt6': None, +} + +LOW_PREF_DEF = { + 'opt1': dict(env=['_ENV_1A'], default='never'), + 'opt2': dict(env=['_ENV_2A', '_ENV_2B']), + 'opt4': dict(env=['_ENV_4A', '_ENV_4B', '_ENV_4C']), + 'opt5': dict(env=['_ENV_5A']), + 'opt6': dict(env=['_ENV_6A'], default='mosdefault'), +} + + +@pytest.fixture +def preread_options(): + return PREREAD_OPTIONS.copy() + + +@pytest.fixture +def adapter(preread_options): + return HashiVaultOptionAdapter.from_dict(preread_options) + + +@pytest.fixture +def option_group_base(adapter): + return HashiVaultOptionGroupBase(adapter) + + +@pytest.fixture(params=[ + # first dict is used to patch the environment vars + # second dict is used to patch the current options to get them to the expected state + # + # envpatch, expatch + ({}, {'opt6': 'mosdefault'}), + ({'_ENV_1A': 'alt1a'}, {'opt6': 'mosdefault'}), + ({'_ENV_3X': 'noop3x'}, {'opt6': 'mosdefault'}), + ({'_ENV_2B': 'alt2b'}, {'opt2': 'alt2b', 'opt6': 'mosdefault'}), + ({'_ENV_2A': 'alt2a', '_ENV_2B': 'alt2b'}, {'opt2': 'alt2a', 'opt6': 'mosdefault'}), + ({'_ENV_4B': 'alt4b', '_ENV_6A': 'defnot', '_ENV_4C': 'alt4c'}, {'opt4': 'alt4b', 'opt6': 'defnot'}), + ({'_ENV_1A': 'alt1a', '_ENV_4A': 'alt4a', '_ENV_1B': 'noop1b', '_ENV_4C': 'alt4c'}, {'opt4': 'alt4a', 'opt6': 'mosdefault'}), + ({'_ENV_5A': 'noop5a', '_ENV_4C': 'alt4c', '_ENV_2A': 'alt2a'}, {'opt2': 'alt2a', 'opt4': 'alt4c', 'opt6': 'mosdefault'}), +]) +def with_env(request, preread_options): + envpatch, expatch = request.param + + expected = preread_options.copy() + expected.update(expatch) + + with mock.patch.dict(os.environ, envpatch): + yield expected + + +class TestHashiVaultOptionGroupBase(object): + + def test_process_late_binding_env_vars(self, option_group_base, with_env, preread_options): + option_group_base.process_late_binding_env_vars(LOW_PREF_DEF) + + assert preread_options == with_env, "Expected: %r\nGot: %r" % (with_env, preread_options) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/conftest.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/conftest.py new file mode 100644 index 000000000..a80bf5508 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/conftest.py @@ -0,0 +1,76 @@ +# Copyright (c) 2022 Brian Scholer (@briantist) +# Copyright (c) 2017 Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import json + +import pytest + +from ansible.module_utils.six import string_types +from ansible.module_utils.common.text.converters import to_bytes +from ansible.module_utils.common._collections_compat import MutableMapping, Sequence + +from ...compat import mock + + +def pytest_configure(config): + config.addinivalue_line( + "markers", "no_ansible_module_patch: causes the patch_ansible_module fixture to have no effect" + ) + + +@pytest.fixture +def module_warn(): + return mock.MagicMock() + + +@pytest.fixture +def patch_ansible_module(request, module_warn): + def _process(param): + if isinstance(param, string_types): + args = param + _yield = args + return (args, _yield) + elif isinstance(param, MutableMapping): + if '_yield' in param: + y = param.pop('_yield') + _yield = dict((k, v) for k, v in param.items() if k in y) + else: + 
_yield = param + + if 'ANSIBLE_MODULE_ARGS' not in param: + param = {'ANSIBLE_MODULE_ARGS': param} + if '_ansible_remote_tmp' not in param['ANSIBLE_MODULE_ARGS']: + param['ANSIBLE_MODULE_ARGS']['_ansible_remote_tmp'] = '/tmp' + if '_ansible_keep_remote_files' not in param['ANSIBLE_MODULE_ARGS']: + param['ANSIBLE_MODULE_ARGS']['_ansible_keep_remote_files'] = False + args = json.dumps(param) + return (args, _yield) + elif isinstance(param, Sequence): + # First item should be a dict that serves as the base of options, + # use it for things that aren't being parametrized. + # Each of the remaining items is the name of a fixture whose name + # begins with opt_ (but without the opt_ prefix), and we will look those up. + if not isinstance(param[0], MutableMapping): + raise Exception('First value in patch_ansible_module array param must be a dict') + + margs = param[0] + for fixt in param[1:]: + margs[fixt] = request.getfixturevalue('opt_' + fixt) + + return _process(margs) + else: + raise Exception('Malformed data to the patch_ansible_module pytest fixture') + + if 'no_ansible_module_patch' in request.keywords: + yield + else: + args, _yield = _process(request.param) + with mock.patch('ansible.module_utils.basic._ANSIBLE_ARGS', to_bytes(args)): + # TODO: in 2.10+ we can patch basic.warn instead of basic.AnsibleModule.warn + with mock.patch('ansible.module_utils.basic.AnsibleModule.warn', module_warn): + yield _yield diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_kv1_get.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_kv1_get.py new file mode 100644 index 000000000..abe96fe8d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_kv1_get.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest +import re +import json + +from ansible.module_utils.basic import missing_required_lib + +from ...compat import mock +from .....plugins.modules import vault_kv1_get +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + + +hvac = pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 'patch_ansible_module', + 'patch_authenticator', + 'patch_get_vault_client', +) + + +def _connection_options(): + return { + 'auth_method': 'token', + 'url': 'http://myvault', + 'token': 'beep-boop', + } + + +def _sample_options(): + return { + 'engine_mount_point': 'kv', + 'path': 'endpoint', + } + + +def _combined_options(**kwargs): + opt = _connection_options() + opt.update(_sample_options()) + opt.update(kwargs) + return opt + + +@pytest.fixture +def kv1_get_response(fixture_loader): + return fixture_loader('kv1_get_response.json') + + +class TestModuleVaultKv1Get(): + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_kv1_get_authentication_error(self, authenticator, exc, capfd): + authenticator.authenticate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_kv1_get.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: 
%r" % (result,) + assert result['msg'] == 'throwaway msg', "result: %r" % result + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_kv1_get_auth_validation_error(self, authenticator, exc, capfd): + authenticator.validate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_kv1_get.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg' + + @pytest.mark.parametrize('opt_engine_mount_point', ['kv', 'other']) + @pytest.mark.parametrize('patch_ansible_module', [[_combined_options(), 'engine_mount_point']], indirect=True) + def test_vault_kv1_get_return_data(self, patch_ansible_module, kv1_get_response, vault_client, opt_engine_mount_point, capfd): + client = vault_client + client.secrets.kv.v1.read_secret.return_value = kv1_get_response.copy() + + expected = {} + expected['raw'] = kv1_get_response.copy() + expected['metadata'] = kv1_get_response.copy() + expected['data'] = expected['metadata'].pop('data') + expected['secret'] = expected['data'] + + with pytest.raises(SystemExit) as e: + vault_kv1_get.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code == 0, "result: %r" % (result,) + + client.secrets.kv.v1.read_secret.assert_called_once_with(path=patch_ansible_module['path'], mount_point=patch_ansible_module['engine_mount_point']) + + for k, v in expected.items(): + assert result[k] == v, ( + "module result did not match expected result:\nmodule: %r\nkey: %s\nexpected: %r" % (result[k], k, v) + ) + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_kv1_get_no_hvac(self, capfd): + with mock.patch.multiple(vault_kv1_get, HAS_HVAC=False, HVAC_IMPORT_ERROR=None, create=True): + with pytest.raises(SystemExit) as e: + vault_kv1_get.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == missing_required_lib('hvac') + + @pytest.mark.parametrize( + 'exc', + [ + (hvac.exceptions.Forbidden, "", r"^Forbidden: Permission Denied to path \['([^']+)'\]"), + ( + hvac.exceptions.InvalidPath, + "Invalid path for a versioned K/V secrets engine", + r"^Invalid path for a versioned K/V secrets engine \['[^']+'\]. 
If this is a KV version 2 path, use community.hashi_vault.vault_kv2_get" + ), + (hvac.exceptions.InvalidPath, "", r"^Invalid or missing path \['[^']+'\]"), + ] + ) + @pytest.mark.parametrize('patch_ansible_module', [[_combined_options(), 'path']], indirect=True) + @pytest.mark.parametrize('opt_path', ['path/1', 'second/path']) + def test_vault_kv1_get_vault_exception(self, vault_client, exc, opt_path, capfd): + + client = vault_client + client.secrets.kv.v1.read_secret.side_effect = exc[0](exc[1]) + + with pytest.raises(SystemExit) as e: + vault_kv1_get.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + match = re.search(exc[2], result['msg']) + assert match is not None, "result: %r\ndid not match: %s" % (result, exc[2]) + + try: + assert opt_path == match.group(1) + except IndexError: + pass diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_kv2_delete.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_kv2_delete.py new file mode 100644 index 000000000..faca37d61 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_kv2_delete.py @@ -0,0 +1,221 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Isaac Wagner (@idwagner) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest +import re +import json + +from ansible.module_utils.basic import missing_required_lib + +from ...compat import mock +from .....plugins.modules import vault_kv2_delete +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + + +hvac = pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 'patch_ansible_module', + 'patch_authenticator', + 'patch_get_vault_client', +) + + +def _connection_options(): + return { + 'auth_method': 'token', + 'url': 'http://myvault', + 'token': 'beep-boop', + } + + +def _sample_options(): + return { + 'engine_mount_point': 'secret', + 'path': 'endpoint', + } + + +def _combined_options(**kwargs): + opt = _connection_options() + opt.update(_sample_options()) + opt.update(kwargs) + return opt + + +class TestModuleVaultKv2Delete(): + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_kv2_delete_authentication_error(self, authenticator, exc, capfd): + authenticator.authenticate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_kv2_delete.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg', "result: %r" % result + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_kv2_delete_auth_validation_error(self, authenticator, exc, capfd): + authenticator.validate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_kv2_delete.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg' + + 
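# Note on the parametrization used below: the list form of patch_ansible_module is
# expanded by the fixture in conftest.py (earlier in this diff); the first element is the
# base options dict and each remaining string names an option whose value comes from a
# fixture called 'opt_<name>'. For example (illustrative only),
# [[_combined_options(), 'versions']] with opt_versions == [1, 3] behaves like
# parametrizing with _combined_options(versions=[1, 3]).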
@pytest.mark.parametrize('opt_versions', [None, [1, 3]]) + @pytest.mark.parametrize('patch_ansible_module', [[_combined_options(), 'versions']], indirect=True) + def test_vault_kv2_delete_empty_response(self, patch_ansible_module, opt_versions, requests_unparseable_response, vault_client, capfd): + client = vault_client + + requests_unparseable_response.status_code = 204 + + if opt_versions: + client.secrets.kv.v2.delete_secret_versions.return_value = requests_unparseable_response + else: + client.secrets.kv.v2.delete_latest_version_of_secret.return_value = requests_unparseable_response + + with pytest.raises(SystemExit) as e: + vault_kv2_delete.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code == 0, "result: %r" % (result,) + + assert result['data'] == {} + + @pytest.mark.parametrize('opt_versions', [None, [1, 3]]) + @pytest.mark.parametrize('patch_ansible_module', [[_combined_options(), 'versions']], indirect=True) + def test_vault_kv2_delete_unparseable_response(self, vault_client, opt_versions, requests_unparseable_response, module_warn, capfd): + client = vault_client + + requests_unparseable_response.status_code = 200 + requests_unparseable_response.content = '(☞゚ヮ゚)☞ ┻━┻' + + if opt_versions: + client.secrets.kv.v2.delete_secret_versions.return_value = requests_unparseable_response + else: + client.secrets.kv.v2.delete_latest_version_of_secret.return_value = requests_unparseable_response + + with pytest.raises(SystemExit) as e: + vault_kv2_delete.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code == 0, "result: %r" % (result,) + assert result['data'] == '(☞゚ヮ゚)☞ ┻━┻' + + module_warn.assert_called_once_with( + 'Vault returned status code 200 and an unparsable body.') + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_kv2_delete_no_hvac(self, capfd): + with mock.patch.multiple(vault_kv2_delete, HAS_HVAC=False, HVAC_IMPORT_ERROR=None, create=True): + with pytest.raises(SystemExit) as e: + vault_kv2_delete.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == missing_required_lib('hvac') + + @pytest.mark.parametrize( + 'exc', + [ + (hvac.exceptions.Forbidden, "", + r"^Forbidden: Permission Denied to path \['([^']+)'\]"), + ] + ) + @pytest.mark.parametrize('opt_versions', [None, [1, 3]]) + @pytest.mark.parametrize('opt_path', ['path/1', 'second/path']) + @pytest.mark.parametrize('patch_ansible_module', [[_combined_options(), 'path', 'versions']], indirect=True) + def test_vault_kv2_delete_vault_exception(self, vault_client, exc, opt_versions, opt_path, capfd): + + client = vault_client + + if opt_versions: + client.secrets.kv.v2.delete_secret_versions.side_effect = exc[0]( + exc[1]) + else: + client.secrets.kv.v2.delete_latest_version_of_secret.side_effect = exc[0]( + exc[1]) + + with pytest.raises(SystemExit) as e: + vault_kv2_delete.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + match = re.search(exc[2], result['msg']) + assert match is not None, "result: %r\ndid not match: %s" % ( + result, exc[2]) + + assert opt_path == match.group(1) + + @pytest.mark.parametrize('opt__ansible_check_mode', [False, True]) + @pytest.mark.parametrize('opt_versions', [None]) + @pytest.mark.parametrize('patch_ansible_module', [[ + _combined_options(), + '_ansible_check_mode', + 'versions' + ]], 
indirect=True) + def test_vault_kv2_delete_latest_version_call(self, vault_client, opt__ansible_check_mode, opt_versions, capfd): + + client = vault_client + client.secrets.kv.v2.delete_latest_version_of_secret.return_value = {} + + with pytest.raises(SystemExit) as e: + vault_kv2_delete.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + if opt__ansible_check_mode: + client.secrets.kv.v2.delete_latest_version_of_secret.assert_not_called() + else: + client.secrets.kv.v2.delete_latest_version_of_secret.assert_called_once_with( + path='endpoint', mount_point='secret') + + @pytest.mark.parametrize('opt__ansible_check_mode', [False, True]) + @pytest.mark.parametrize('opt_versions', [[1, 3]]) + @pytest.mark.parametrize('patch_ansible_module', [[ + _combined_options(), + '_ansible_check_mode', + 'versions' + ]], indirect=True) + def test_vault_kv2_delete_specific_versions_call(self, vault_client, opt__ansible_check_mode, opt_versions, capfd): + + client = vault_client + client.secrets.kv.v2.delete_secret_versions.return_value = {} + + with pytest.raises(SystemExit) as e: + vault_kv2_delete.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + if opt__ansible_check_mode: + client.secrets.kv.v2.delete_secret_versions.assert_not_called() + else: + client.secrets.kv.v2.delete_secret_versions.assert_called_once_with( + path='endpoint', mount_point='secret', versions=[1, 3]) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_kv2_get.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_kv2_get.py new file mode 100644 index 000000000..052015b3c --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_kv2_get.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest +import re +import json + +from ansible.module_utils.basic import missing_required_lib + +from ...compat import mock +from .....plugins.modules import vault_kv2_get +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + + +hvac = pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 'patch_ansible_module', + 'patch_authenticator', + 'patch_get_vault_client', +) + + +def _connection_options(): + return { + 'auth_method': 'token', + 'url': 'http://myvault', + 'token': 'beep-boop', + } + + +def _sample_options(): + return { + 'engine_mount_point': 'secret', + 'path': 'endpoint', + } + + +def _combined_options(**kwargs): + opt = _connection_options() + opt.update(_sample_options()) + opt.update(kwargs) + return opt + + +@pytest.fixture +def kv2_get_response(fixture_loader): + return fixture_loader('kv2_get_response.json') + + +class TestModuleVaultKv2Get(): + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_kv2_get_authentication_error(self, authenticator, exc, capfd): + authenticator.authenticate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_kv2_get.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % 
(result,) + assert result['msg'] == 'throwaway msg', "result: %r" % result + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_kv2_get_auth_validation_error(self, authenticator, exc, capfd): + authenticator.validate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_kv2_get.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg' + + @pytest.mark.parametrize('opt_engine_mount_point', ['secret', 'other']) + @pytest.mark.parametrize('opt_version', [None, 2, 10]) + @pytest.mark.parametrize('patch_ansible_module', [[_combined_options(), 'engine_mount_point', 'version']], indirect=True) + def test_vault_kv2_get_return_data(self, patch_ansible_module, kv2_get_response, vault_client, opt_engine_mount_point, opt_version, capfd): + client = vault_client + rv = kv2_get_response.copy() + rv['data']['metadata']['version'] = opt_version + client.secrets.kv.v2.read_secret_version.return_value = rv + + expected = {} + expected['raw'] = rv.copy() + expected['metadata'] = expected['raw']['data']['metadata'] + expected['data'] = expected['raw']['data'] + expected['secret'] = expected['data']['data'] + + with pytest.raises(SystemExit) as e: + vault_kv2_get.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code == 0, "result: %r" % (result,) + + client.secrets.kv.v2.read_secret_version.assert_called_once_with( + path=patch_ansible_module['path'], + mount_point=patch_ansible_module['engine_mount_point'], + version=opt_version + ) + + for k, v in expected.items(): + assert result[k] == v, ( + "module result did not match expected result:\nmodule: %r\nkey: %s\nexpected: %r" % (result[k], k, v) + ) + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_kv2_get_no_hvac(self, capfd): + with mock.patch.multiple(vault_kv2_get, HAS_HVAC=False, HVAC_IMPORT_ERROR=None, create=True): + with pytest.raises(SystemExit) as e: + vault_kv2_get.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == missing_required_lib('hvac') + + @pytest.mark.parametrize( + 'exc', + [ + (hvac.exceptions.Forbidden, "", r"^Forbidden: Permission Denied to path \['([^']+)'\]"), + ( + hvac.exceptions.InvalidPath, + "", + r"^Invalid or missing path \['([^']+)'\] with secret version '(\d+|latest)'. 
Check the path or secret version" + ), + ] + ) + @pytest.mark.parametrize('patch_ansible_module', [[_combined_options(), 'path', 'version']], indirect=True) + @pytest.mark.parametrize('opt_path', ['path/1', 'second/path']) + @pytest.mark.parametrize('opt_version', [None, 2, 10]) + def test_vault_kv2_get_vault_exception(self, vault_client, exc, opt_version, opt_path, capfd): + + client = vault_client + client.secrets.kv.v2.read_secret_version.side_effect = exc[0](exc[1]) + + with pytest.raises(SystemExit) as e: + vault_kv2_get.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + match = re.search(exc[2], result['msg']) + assert match is not None, "result: %r\ndid not match: %s" % (result, exc[2]) + + assert opt_path == match.group(1) + + try: + assert (opt_version is None) == (match.group(2) == 'latest') + assert (opt_version is not None) == (match.group(2) == str(opt_version)) + except IndexError: + pass diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_kv2_write.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_kv2_write.py new file mode 100644 index 000000000..4f108a25b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_kv2_write.py @@ -0,0 +1,163 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Devon Mar (@devon-mar) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import json + +import pytest +from ansible.module_utils.basic import missing_required_lib + +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError +from .....plugins.modules import vault_kv2_write +from ...compat import mock + +hvac = pytest.importorskip("hvac") + +pytestmark = pytest.mark.usefixtures( + "patch_ansible_module", + "patch_authenticator", + "patch_get_vault_client", +) + + +def _connection_options(): + return { + "auth_method": "token", + "url": "http://myvault", + "token": "beep-boop", + } + + +def _sample_options(): + return { + "engine_mount_point": "secret", + "path": "endpoint", + "data": {"foo": "bar"}, + } + + +def _combined_options(**kwargs): + opt = _connection_options() + opt.update(_sample_options()) + opt.update(kwargs) + return opt + + +class TestModuleVaultKv2Write: + @pytest.mark.parametrize( + "patch_ansible_module", [_combined_options()], indirect=True + ) + @pytest.mark.parametrize( + "exc", + [HashiVaultValueError("throwaway msg"), NotImplementedError("throwaway msg")], + ) + def test_vault_kv2_write_authentication_error(self, authenticator, exc, capfd): + authenticator.authenticate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_kv2_write.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result["msg"] == "throwaway msg", "result: %r" % result + + @pytest.mark.parametrize( + "patch_ansible_module", [_combined_options()], indirect=True + ) + @pytest.mark.parametrize( + "exc", + [HashiVaultValueError("throwaway msg"), NotImplementedError("throwaway msg")], + ) + def test_vault_kv2_write_auth_validation_error(self, authenticator, exc, capfd): + authenticator.validate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_kv2_write.main() + + out, err = 
capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result["msg"] == "throwaway msg" + + @pytest.mark.parametrize( + "patch_ansible_module", [_combined_options()], indirect=True + ) + def test_vault_kv2_write_get_no_hvac(self, capfd): + with mock.patch.multiple( + vault_kv2_write, HAS_HVAC=False, HVAC_IMPORT_ERROR=None, create=True + ): + with pytest.raises(SystemExit) as e: + vault_kv2_write.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result["msg"] == missing_required_lib("hvac") + + @pytest.mark.parametrize( + "patch_ansible_module", [[_combined_options(read_before_write=True)]], indirect=True + ) + @pytest.mark.parametrize( + "response", + ({"thishasnodata": {}}, {"data": {"not data": {}}}), + ) + def test_vault_kv2_write_read_responses_invalid( + self, vault_client, capfd, response + ): + client = vault_client + + client.secrets.kv.v2.read_secret_version.return_value = response + + with pytest.raises(SystemExit) as e: + vault_kv2_write.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert "Vault response did not contain data" in result["msg"] + + @pytest.mark.parametrize("exc", [hvac.exceptions.VaultError("throwaway msg")]) + @pytest.mark.parametrize( + "patch_ansible_module", [_combined_options(read_before_write=True)], indirect=True + ) + def test_vault_kv2_write_read_vault_error(self, vault_client, capfd, exc): + client = vault_client + + client.secrets.kv.v2.read_secret_version.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_kv2_write.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert "VaultError reading" in result["msg"], "result: %r" % (result,) + + @pytest.mark.parametrize("exc", [hvac.exceptions.InvalidPath("throwaway msg")]) + @pytest.mark.parametrize( + "patch_ansible_module", [_combined_options()], indirect=True + ) + def test_vault_kv2_write_write_invalid_path(self, vault_client, capfd, exc): + client = vault_client + + client.secrets.kv.v2.create_or_update_secret.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_kv2_write.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert "InvalidPath writing to" in result["msg"], "result: %r" % (result,) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_list.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_list.py new file mode 100644 index 000000000..6891547be --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_list.py @@ -0,0 +1,164 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Tom Kivlin (@tomkivlin) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest +import re +import json + +from ansible.module_utils.basic import missing_required_lib + +from ...compat import mock +from .....plugins.modules import vault_list +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + + +hvac = pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 
'patch_ansible_module', + 'patch_authenticator', + 'patch_get_vault_client', +) + + +def _connection_options(): + return { + 'auth_method': 'token', + 'url': 'http://myvault', + 'token': 'beep-boop', + } + + +def _sample_options(): + return { + 'path': 'endpoint', + } + + +def _combined_options(**kwargs): + opt = _connection_options() + opt.update(_sample_options()) + opt.update(kwargs) + return opt + + +LIST_FIXTURES = [ + 'kv2_list_response.json', + 'policy_list_response.json', + 'userpass_list_response.json', +] + + +@pytest.fixture(params=LIST_FIXTURES) +def list_response(request, fixture_loader): + return fixture_loader(request.param) + + +class TestModuleVaultList(): + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_list_authentication_error(self, authenticator, exc, capfd): + authenticator.authenticate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_list.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg', "result: %r" % result + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_list_auth_validation_error(self, authenticator, exc, capfd): + authenticator.validate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_list.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg' + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_list_return_data(self, patch_ansible_module, list_response, vault_client, capfd): + client = vault_client + client.list.return_value = list_response.copy() + + with pytest.raises(SystemExit) as e: + vault_list.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code == 0, "result: %r" % (result,) + + client.list.assert_called_once_with(patch_ansible_module['path']) + + assert result['data'] == list_response, "module result did not match expected result:\nexpected: %r\ngot: %r" % (list_response, result) + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_list_no_data(self, patch_ansible_module, vault_client, capfd): + client = vault_client + client.list.return_value = None + + with pytest.raises(SystemExit) as e: + vault_list.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + + client.list.assert_called_once_with(patch_ansible_module['path']) + + match = re.search(r"The path '[^']+' doesn't seem to exist", result['msg']) + + assert match is not None, "Unexpected msg: %s" % result['msg'] + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_list_no_hvac(self, capfd): + with mock.patch.multiple(vault_list, HAS_HVAC=False, HVAC_IMPORT_ERROR=None, create=True): + with pytest.raises(SystemExit) as e: + vault_list.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == missing_required_lib('hvac') + + 
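# A note on the HAS_HVAC patching pattern in the test above (used across the module tests
# in this diff): patching HAS_HVAC to False with create=True stands in for the module's
# guarded hvac import having failed, hence the assertion that the module exits non-zero
# with Ansible's standard missing_required_lib('hvac') message instead of a traceback.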
@pytest.mark.parametrize( + 'exc', + [ + (hvac.exceptions.Forbidden, "", r"^Forbidden: Permission Denied to path '([^']+)'"), + ] + ) + @pytest.mark.parametrize('patch_ansible_module', [[_combined_options(), 'path']], indirect=True) + @pytest.mark.parametrize('opt_path', ['path/1', 'second/path']) + def test_vault_list_vault_exception(self, vault_client, exc, opt_path, capfd): + + client = vault_client + client.list.side_effect = exc[0](exc[1]) + + with pytest.raises(SystemExit) as e: + vault_list.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + match = re.search(exc[2], result['msg']) + assert match is not None, "result: %r\ndid not match: %s" % (result, exc[2]) + + assert opt_path == match.group(1) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_login.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_login.py new file mode 100644 index 000000000..95356a5c5 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_login.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest +import json + +from ansible.module_utils.basic import missing_required_lib + +from ...compat import mock +from .....plugins.modules import vault_login +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + + +hvac = pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 'patch_ansible_module', + 'patch_authenticator', + 'patch_get_vault_client', +) + + +def _connection_options(): + return { + 'auth_method': 'token', + 'url': 'http://myvault', + 'token': 'beep-boop', + } + + +def _sample_options(): + return {} + + +def _combined_options(**kwargs): + opt = _connection_options() + opt.update(_sample_options()) + opt.update(kwargs) + return opt + + +@pytest.fixture +def token_lookup_full_response(fixture_loader): + return fixture_loader('lookup-self_with_meta.json') + + +class TestModuleVaultLogin(): + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_login_authentication_error(self, authenticator, exc, capfd): + authenticator.authenticate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_login.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg', "result: %r" % result + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_login_auth_validation_error(self, authenticator, exc, capfd): + authenticator.validate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_login.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg' + + @pytest.mark.parametrize('opt__ansible_check_mode', [False, True]) + 
@pytest.mark.parametrize( + ['opt_auth_method', 'opt_token', 'opt_role_id'], + [ + ('token', 'beep-boop-bloop', None), + ('approle', None, 'not-used'), + ] + ) + @pytest.mark.parametrize('patch_ansible_module', [[ + _combined_options(), + '_ansible_check_mode', + 'auth_method', + 'token', + 'role_id', + ]], indirect=True) + def test_vault_login_return_data( + self, patch_ansible_module, token_lookup_full_response, authenticator, vault_client, + opt__ansible_check_mode, opt_auth_method, opt_token, opt_role_id, capfd + ): + authenticator.authenticate.return_value = token_lookup_full_response + + with pytest.raises(SystemExit) as e: + vault_login.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code == 0, "result: %r" % (result,) + + authenticator.validate.assert_called_once() + + assert result['changed'] == (opt_auth_method != 'token') + + if opt__ansible_check_mode: + authenticator.authenticate.assert_not_called() + assert result['login'] == {'auth': {'client_token': None}} + else: + authenticator.authenticate.assert_called_once_with(vault_client) + assert result['login'] == token_lookup_full_response, "expected: %r\ngot: %r" % (token_lookup_full_response, result['login']) + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_login_no_hvac(self, capfd): + with mock.patch.multiple(vault_login, HAS_HVAC=False, HVAC_IMPORT_ERROR=None, create=True): + with pytest.raises(SystemExit) as e: + vault_login.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == missing_required_lib('hvac') diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_pki_generate_certificate.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_pki_generate_certificate.py new file mode 100644 index 000000000..29bba2165 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_pki_generate_certificate.py @@ -0,0 +1,131 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +import json + +from ansible.module_utils.basic import missing_required_lib + +from ...compat import mock +from .....plugins.modules import vault_pki_generate_certificate + +pytestmark = pytest.mark.usefixtures( + 'patch_ansible_module', + 'patch_authenticator', + 'patch_get_vault_client', +) + + +def _connection_options(): + return { + 'auth_method': 'token', + 'url': 'http://myvault', + 'token': 'throwaway', + } + + +def _sample_options(): + return { + 'role_name': 'some_role', + 'common_name': 'common_name', + 'alt_names': ['a', 'b'], + 'ip_sans': ['c', 'd'], + 'uri_sans': ['e', 'f'], + 'other_sans': ['g', 'h'], + 'ttl': '1h', + 'format': 'der', + 'private_key_format': 'pkcs8', + 'exclude_cn_from_sans': True, + 'engine_mount_point': 'alt', + } + + +def _combined_options(**kwargs): + opt = _connection_options() + opt.update(_sample_options()) + opt.update(kwargs) + return opt + + +@pytest.fixture +def sample_options(): + return _sample_options() + + +@pytest.fixture +def translated_options(sample_options): + toplevel = { + 'role_name': 'name', + 'engine_mount_point': 
'mount_point', + 'common_name': 'common_name', + } + + opt = {'extra_params': {}} + for k, v in sample_options.items(): + if k in toplevel: + opt[toplevel[k]] = v + else: + if type(v) is list: + val = ','.join(v) + else: + val = v + + opt['extra_params'][k] = val + + return opt + + +@pytest.fixture +def pki_generate_certificate_response(fixture_loader): + return fixture_loader('pki_generate_certificate_response.json') + + +class TestModuleVaultPkiGenerateCertificate(): + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_pki_generate_certificate_options(self, pki_generate_certificate_response, translated_options, vault_client, capfd): + client = vault_client + client.secrets.pki.generate_certificate.return_value = pki_generate_certificate_response + + with pytest.raises(SystemExit) as e: + vault_pki_generate_certificate.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + client.secrets.pki.generate_certificate.assert_called_once_with(**translated_options) + + assert result['data'] == pki_generate_certificate_response, ( + "module result did not match expected result:\nmodule: %r\nexpected: %r" % (result['data'], pki_generate_certificate_response) + ) + assert e.value.code == 0 + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_pki_generate_certificate_no_hvac(self, capfd): + with mock.patch.multiple(vault_pki_generate_certificate, HAS_HVAC=False, HVAC_IMPORT_ERROR=None, create=True): + with pytest.raises(SystemExit) as e: + vault_pki_generate_certificate.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert result['msg'] == missing_required_lib('hvac') + assert e.value.code != 0 + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_pki_generate_certificate_vault_exception(self, vault_client, capfd): + hvac = pytest.importorskip('hvac') + + client = vault_client + client.secrets.pki.generate_certificate.side_effect = hvac.exceptions.VaultError + + with pytest.raises(SystemExit) as e: + vault_pki_generate_certificate.main() + + assert e.value.code != 0 diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_read.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_read.py new file mode 100644 index 000000000..114b23a31 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_read.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest +import re +import json + +from ansible.module_utils.basic import missing_required_lib + +from ...compat import mock +from .....plugins.modules import vault_read +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + + +hvac = pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 'patch_ansible_module', + 'patch_authenticator', + 'patch_get_vault_client', +) + + +def _connection_options(): + return { + 'auth_method': 'token', + 'url': 'http://myvault', + 'token': 'beep-boop', + } + + +def _sample_options(): + return { + 'path': 'endpoint', + } + + +def _combined_options(**kwargs): + opt = 
_connection_options() + opt.update(_sample_options()) + opt.update(kwargs) + return opt + + +@pytest.fixture +def kv1_get_response(fixture_loader): + return fixture_loader('kv1_get_response.json') + + +class TestModuleVaultRead(): + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_read_authentication_error(self, authenticator, exc, capfd): + authenticator.authenticate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_read.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg', "result: %r" % result + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_read_auth_validation_error(self, authenticator, exc, capfd): + authenticator.validate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_read.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg' + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_read_return_data(self, patch_ansible_module, kv1_get_response, vault_client, capfd): + client = vault_client + client.read.return_value = kv1_get_response.copy() + + with pytest.raises(SystemExit) as e: + vault_read.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code == 0, "result: %r" % (result,) + + client.read.assert_called_once_with(patch_ansible_module['path']) + + assert result['data'] == kv1_get_response, "module result did not match expected result:\nexpected: %r\ngot: %r" % (kv1_get_response, result) + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_read_no_data(self, patch_ansible_module, vault_client, capfd): + client = vault_client + client.read.return_value = None + + with pytest.raises(SystemExit) as e: + vault_read.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + + client.read.assert_called_once_with(patch_ansible_module['path']) + + match = re.search(r"The path '[^']+' doesn't seem to exist", result['msg']) + + assert match is not None, "Unexpected msg: %s" % result['msg'] + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_read_no_hvac(self, capfd): + with mock.patch.multiple(vault_read, HAS_HVAC=False, HVAC_IMPORT_ERROR=None, create=True): + with pytest.raises(SystemExit) as e: + vault_read.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == missing_required_lib('hvac') + + @pytest.mark.parametrize( + 'exc', + [ + (hvac.exceptions.Forbidden, "", r"^Forbidden: Permission Denied to path '([^']+)'"), + ] + ) + @pytest.mark.parametrize('patch_ansible_module', [[_combined_options(), 'path']], indirect=True) + @pytest.mark.parametrize('opt_path', ['path/1', 'second/path']) + def test_vault_read_vault_exception(self, vault_client, exc, opt_path, capfd): + + client = vault_client + client.read.side_effect = exc[0](exc[1]) + + with 
pytest.raises(SystemExit) as e: + vault_read.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + match = re.search(exc[2], result['msg']) + assert match is not None, "result: %r\ndid not match: %s" % (result, exc[2]) + + assert opt_path == match.group(1) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_token_create.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_token_create.py new file mode 100644 index 000000000..24e44b800 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_token_create.py @@ -0,0 +1,269 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import sys +import pytest + +import json + +from .....plugins.modules import vault_token_create +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + +pytestmark = pytest.mark.usefixtures( + 'patch_ansible_module', + 'patch_authenticator', + 'patch_get_vault_client', +) + + +def _connection_options(): + return { + 'auth_method': 'token', + 'url': 'http://myvault', + 'token': 'rando', + } + + +def _pass_thru_options(): + return { + 'no_parent': True, + 'no_default_policy': True, + 'policies': ['a', 'b'], + 'id': 'tokenid', + 'role_name': 'role', + 'meta': {'a': 'valA', 'b': 'valB'}, + 'renewable': True, + 'ttl': '1h', + 'type': 'batch', + 'explicit_max_ttl': '2h', + 'display_name': 'kiminonamae', + 'num_uses': 9, + 'period': '8h', + 'entity_alias': 'alias', + 'wrap_ttl': '60s', + } + + +def _combined_options(**kwargs): + opt = _connection_options() + opt.update(_pass_thru_options()) + opt.update(kwargs) + return opt + + +@pytest.fixture +def pass_thru_options(): + return _pass_thru_options() + + +@pytest.fixture +def orphan_option_translation(): + return { + 'id': 'token_id', + 'role_name': 'role', + 'type': 'token_type', + } + + +@pytest.fixture +def token_create_response(fixture_loader): + return fixture_loader('token_create_response.json') + + +class TestModuleVaultTokenCreate(): + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_token_create_authentication_error(self, authenticator, exc, capfd): + authenticator.authenticate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_token_create.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg', "result: %r" % result + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_token_create_auth_validation_error(self, authenticator, exc, capfd): + authenticator.validate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_token_create.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg' + + 
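# The orphan_option_translation fixture above mirrors the key renaming the module is
# expected to apply when it falls through to the orphan token API (verified by the
# orphan-options test below). As a purely illustrative sketch of that renaming, not the
# module's code:
#     kwargs = {ORPHAN_OPTION_TRANSLATION.get(k, k): v for k, v in pass_thru.items()}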
@pytest.mark.no_ansible_module_patch + def test_vault_token_create_passthru_options_expected(self, pass_thru_options): + # designed to catch the case where new passthru options differ between tests and module + + module_set = set(vault_token_create.PASS_THRU_OPTION_NAMES) + test_set = set(pass_thru_options.keys()) + + assert sorted(vault_token_create.PASS_THRU_OPTION_NAMES) == sorted(pass_thru_options.keys()), ( + "Passthru options in module do not match options in test: %r" % ( + list(module_set ^ test_set) + ) + ) + + @pytest.mark.no_ansible_module_patch + def test_vault_token_create_orphan_options_expected(self, orphan_option_translation, pass_thru_options): + # designed to catch the case where new orphan translations differ between tests and module + # and that all listed translations are present in passthru options + + module_set = set(vault_token_create.ORPHAN_OPTION_TRANSLATION.items()) + test_set = set(orphan_option_translation.items()) + + module_key_set = set(vault_token_create.ORPHAN_OPTION_TRANSLATION.keys()) + pass_thru_key_set = set(pass_thru_options.keys()) + + assert module_set == test_set, ( + "Orphan options in module do not match orphan options in test:\nmodule: %r\ntest: %r" % ( + dict(module_set - test_set), + dict(test_set - module_set), + ) + ) + assert vault_token_create.ORPHAN_OPTION_TRANSLATION.keys() <= pass_thru_options.keys(), ( + "Orphan option translation keys must exist in passthru options: %r" % ( + list(module_key_set - pass_thru_key_set), + ) + ) + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_token_create_passthru_options(self, pass_thru_options, token_create_response, vault_client, capfd): + client = vault_client + client.auth.token.create.return_value = token_create_response + + with pytest.raises(SystemExit): + vault_token_create.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + client.create_token.assert_not_called() + client.auth.token.create_orphan.assert_not_called() + client.auth.token.create.assert_called_once() + + assert result['login'] == token_create_response, ( + "module result did not match expected result:\nmodule: %r\nexpected: %r" % (result['login'], token_create_response) + ) + + if sys.version_info < (3, 8): + # TODO: remove when python < 3.8 is dropped + assert pass_thru_options.items() <= client.auth.token.create.call_args[1].items() + else: + assert pass_thru_options.items() <= client.auth.token.create.call_args.kwargs.items() + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options(orphan=True)], indirect=True) + def test_vault_token_create_orphan_options(self, pass_thru_options, orphan_option_translation, token_create_response, vault_client, capfd): + client = vault_client + client.auth.token.create_orphan.return_value = token_create_response + + with pytest.raises(SystemExit): + vault_token_create.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + client.create_token.assert_not_called() + client.auth.token.create.assert_not_called() + client.auth.token.create_orphan.assert_called_once() + + assert result['login'] == token_create_response, ( + "module result did not match expected result:\nmodule: %r\nexpected: %r" % (result['login'], token_create_response) + ) + + if sys.version_info < (3, 8): + # TODO: remove when python < 3.8 is dropped + call_kwargs = client.auth.token.create_orphan.call_args[1] + else: + call_kwargs = client.auth.token.create_orphan.call_args.kwargs + + for name, orphan in 
orphan_option_translation.items(): + assert name not in call_kwargs, ( + "'%s' was found in call to orphan method, should be '%s'" % (name, orphan) + ) + assert orphan in call_kwargs, ( + "'%s' (from '%s') was not found in call to orphan method" % (orphan, name) + ) + assert call_kwargs[orphan] == pass_thru_options.get(name), ( + "Expected orphan param '%s' not found or value did not match:\nvalue: %r\nexpected: %r" % ( + orphan, + call_kwargs.get(orphan), + pass_thru_options.get(name), + ) + ) + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options(orphan=True)], indirect=True) + def test_vault_token_create_orphan_fallback(self, token_create_response, vault_client, capfd): + client = vault_client + client.create_token.return_value = token_create_response + client.auth.token.create_orphan.side_effect = AttributeError + + with pytest.raises(SystemExit): + vault_token_create.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + client.auth.token.create_orphan.assert_called_once() + client.create_token.assert_called_once() + + assert result['login'] == token_create_response, ( + "module result did not match expected result:\nmodule: %r\nexpected: %r" % (result['login'], token_create_response) + ) + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_token_create_exception_handling_standard(self, vault_client, capfd): + client = vault_client + client.auth.token.create.side_effect = Exception('side_effect') + + with pytest.raises(SystemExit) as e: + vault_token_create.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'side_effect' + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options(orphan=True)], indirect=True) + def test_vault_token_create_exception_handling_orphan(self, vault_client, capfd): + client = vault_client + client.auth.token.create_orphan.side_effect = Exception('side_effect') + + with pytest.raises(SystemExit) as e: + vault_token_create.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'side_effect' + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options(orphan=True)], indirect=True) + def test_vault_token_create_exception_handling_orphan_fallback(self, vault_client, capfd): + client = vault_client + client.create_token.side_effect = Exception('side_effect') + client.auth.token.create_orphan.side_effect = AttributeError + + with pytest.raises(SystemExit) as e: + vault_token_create.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'side_effect' diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_write.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_write.py new file mode 100644 index 000000000..afe877e8d --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/modules/test_vault_write.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest +import re +import json + 
+from ansible.module_utils.basic import missing_required_lib + +from ...compat import mock +from .....plugins.modules import vault_write +from .....plugins.module_utils._hashi_vault_common import HashiVaultValueError + + +hvac = pytest.importorskip('hvac') + + +pytestmark = pytest.mark.usefixtures( + 'patch_ansible_module', + 'patch_authenticator', + 'patch_get_vault_client', +) + + +def _connection_options(): + return { + 'auth_method': 'token', + 'url': 'http://myvault', + 'token': 'beep-boop', + } + + +def _sample_options(): + return { + 'path': 'endpoint', + } + + +def _combined_options(**kwargs): + opt = _connection_options() + opt.update(_sample_options()) + opt.update(kwargs) + return opt + + +@pytest.fixture +def approle_secret_id_write_response(fixture_loader): + return fixture_loader('approle_secret_id_write_response.json') + + +class TestModuleVaultWrite(): + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_write_authentication_error(self, authenticator, exc, capfd): + authenticator.authenticate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_write.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg', "result: %r" % result + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + @pytest.mark.parametrize('exc', [HashiVaultValueError('throwaway msg'), NotImplementedError('throwaway msg')]) + def test_vault_write_auth_validation_error(self, authenticator, exc, capfd): + authenticator.validate.side_effect = exc + + with pytest.raises(SystemExit) as e: + vault_write.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == 'throwaway msg' + + @pytest.mark.parametrize('opt_data', [{}, {'thing': 'one', 'thang': 'two'}]) + @pytest.mark.parametrize('opt_wrap_ttl', [None, '5m']) + @pytest.mark.parametrize('patch_ansible_module', [[_combined_options(), 'data', 'wrap_ttl']], indirect=True) + def test_vault_write_return_data(self, patch_ansible_module, approle_secret_id_write_response, vault_client, opt_wrap_ttl, opt_data, capfd): + client = vault_client + client.write.return_value = approle_secret_id_write_response + + with pytest.raises(SystemExit) as e: + vault_write.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code == 0, "result: %r" % (result,) + + client.write.assert_called_once_with(path=patch_ansible_module['path'], wrap_ttl=opt_wrap_ttl, **opt_data) + + assert result['data'] == approle_secret_id_write_response, ( + "module result did not match expected result:\nmodule: %r\nexpected: %r" % (result['data'], approle_secret_id_write_response) + ) + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_write_empty_response(self, vault_client, requests_unparseable_response, capfd): + client = vault_client + + requests_unparseable_response.status_code = 204 + + client.write.return_value = requests_unparseable_response + + with pytest.raises(SystemExit) as e: + vault_write.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code == 0, "result: %r" % (result,) + + assert result['data'] == {} + + @pytest.mark.parametrize('patch_ansible_module', 
[_combined_options()], indirect=True) + def test_vault_write_unparseable_response(self, vault_client, requests_unparseable_response, module_warn, capfd): + client = vault_client + + requests_unparseable_response.status_code = 200 + requests_unparseable_response.content = '﷽' + + client.write.return_value = requests_unparseable_response + + with pytest.raises(SystemExit) as e: + vault_write.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code == 0, "result: %r" % (result,) + assert result['data'] == '﷽' + + module_warn.assert_called_once_with('Vault returned status code 200 and an unparsable body.') + + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_write_no_hvac(self, capfd): + with mock.patch.multiple(vault_write, HAS_HVAC=False, HVAC_IMPORT_ERROR=None, create=True): + with pytest.raises(SystemExit) as e: + vault_write.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert result['msg'] == missing_required_lib('hvac') + + @pytest.mark.parametrize( + 'exc', + [ + (hvac.exceptions.Forbidden, r'^Forbidden: Permission Denied to path'), + (hvac.exceptions.InvalidPath, r"^The path '[^']+' doesn't seem to exist"), + (hvac.exceptions.InternalServerError, r'^Internal Server Error:'), + ] + ) + @pytest.mark.parametrize('patch_ansible_module', [_combined_options()], indirect=True) + def test_vault_write_vault_exception(self, vault_client, exc, capfd): + + client = vault_client + client.write.side_effect = exc[0] + + with pytest.raises(SystemExit) as e: + vault_write.main() + + out, err = capfd.readouterr() + result = json.loads(out) + + assert e.value.code != 0, "result: %r" % (result,) + assert re.search(exc[1], result['msg']) is not None diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/authentication/conftest.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/authentication/conftest.py new file mode 100644 index 000000000..bf577a2d0 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/authentication/conftest.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +# pylint: disable=wildcard-import,unused-wildcard-import +from ...module_utils.authentication.conftest import * diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/authentication/test_auth_token.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/authentication/test_auth_token.py new file mode 100644 index 000000000..3dbc4c7fc --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/authentication/test_auth_token.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible.utils.unsafe_proxy import AnsibleUnsafe, AnsibleUnsafeBytes, AnsibleUnsafeText + +from 
ansible_collections.community.hashi_vault.tests.unit.compat import mock + +from ansible_collections.community.hashi_vault.plugins.module_utils._auth_method_token import ( + HashiVaultAuthMethodToken, +) + + +@pytest.fixture +def option_dict(): + return { + 'auth_method': 'fake', + 'token': None, + 'token_path': None, + 'token_file': '.vault-token', + 'token_validate': True, + } + + +@pytest.fixture(params=[AnsibleUnsafeBytes(b'ub_opaque'), AnsibleUnsafeText(u'ut_opaque'), b'b_opaque', u't_opaque']) +def stringy(request): + return request.param + + +@pytest.fixture +def auth_token(adapter, warner, deprecator): + return HashiVaultAuthMethodToken(adapter, warner, deprecator) + + +class TestAuthToken(object): + def test_auth_token_unsafes(self, auth_token, client, adapter, stringy): + adapter.set_option('token', stringy) + adapter.set_option('token_validate', False) + + wrapper = mock.Mock(wraps=auth_token._stringify) + + with mock.patch.object(auth_token, '_stringify', wrapper): + response = auth_token.authenticate(client, use_token=True, lookup_self=False) + + assert isinstance(response['auth']['client_token'], (bytes, type(u''))), repr(response['auth']['client_token']) + assert isinstance(client.token, (bytes, type(u''))), repr(client.token) + assert not isinstance(response['auth']['client_token'], AnsibleUnsafe), repr(response['auth']['client_token']) + assert not isinstance(client.token, AnsibleUnsafe), repr(client.token) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/base/test_hashi_vault_lookup_base.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/base/test_hashi_vault_lookup_base.py new file mode 100644 index 000000000..620583b41 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/base/test_hashi_vault_lookup_base.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible.errors import AnsibleError +from ansible.plugins.lookup import LookupBase + +from ....compat import mock +from ......plugins.plugin_utils._hashi_vault_plugin import HashiVaultPlugin +from ......plugins.plugin_utils._hashi_vault_lookup_base import HashiVaultLookupBase + + +@pytest.fixture +def hashi_vault_lookup_module(): + return FakeLookupModule() + + +class FakeLookupModule(HashiVaultLookupBase): + def run(self, terms, variables=None, **kwargs): + pass + + +class TestHashiVaultLookupBase(object): + + def test_is_hashi_vault_plugin(self, hashi_vault_lookup_module): + assert issubclass(type(hashi_vault_lookup_module), HashiVaultPlugin) + + def test_is_ansible_lookup_base(self, hashi_vault_lookup_module): + assert issubclass(type(hashi_vault_lookup_module), LookupBase) + hashi_vault_lookup_module.run([]) # run this for "coverage" + + @pytest.mark.parametrize( + 'term,unqualified', + [ + ('value1 key2=value2 key3=val_w/=in_it key4=value4', 'key1'), + ('key1=value1 key2=value2 key3=val_w/=in_it key4=value4', None), + ('key1=value1 key2=value2 key3=val_w/=in_it key4=value4', 'NotReal'), + ] + ) + def test_parse_kev_term_success(self, term, unqualified, hashi_vault_lookup_module): + EXPECTED = { + 'key1': 'value1', + 'key2': 'value2', + 'key3': 'val_w/=in_it', + 'key4': 'value4', + } + parsed = 
hashi_vault_lookup_module.parse_kev_term(term, plugin_name='fake', first_unqualified=unqualified) + + assert parsed == EXPECTED + + @pytest.mark.parametrize( + 'term,unqualified', + [ + ('value1 key2=value2 key3=val_w/=in_it key4=value4', None), + ('key1=value1 value2 key3=val_w/=in_it key4=value4', None), + ('key1=value1 value2 key3=val_w/=in_it key4=value4', 'key2'), + ('key1=val1 invalid key3=val3', None), + ('key1=val1 invalid key3=val3', 'key1'), + ] + ) + def test_parse_kev_term_invalid_term_strings(self, term, unqualified, hashi_vault_lookup_module): + with pytest.raises(AnsibleError): + parsed = hashi_vault_lookup_module.parse_kev_term(term, plugin_name='fake', first_unqualified=unqualified) + + def test_parse_kev_term_plugin_name_required(self, hashi_vault_lookup_module): + with pytest.raises(TypeError): + parsed = hashi_vault_lookup_module.parse_kev_term('key1=value1', first_unqualified='fake') + + # TODO: v5.0.0 - should raise not warn: https://github.com/ansible-collections/community.hashi_vault/pull/350 + @pytest.mark.parametrize('term', [ + 'one secret=two a=1 b=2', + 'a=1 secret=one b=2 secret=two', + 'secret=one secret=two a=z b=y', + ]) + def test_parse_kev_term_duplicate_option(self, term, hashi_vault_lookup_module): + dup_key = 'secret' + removed_in = '5.0.0' + expected_template = "Duplicate key '%s' in the term string '%s'.\nIn version %s of the collection, this will raise an exception." + expected_msg = expected_template % (dup_key, term, removed_in) + + with mock.patch('ansible_collections.community.hashi_vault.plugins.plugin_utils._hashi_vault_lookup_base.display') as display: + hashi_vault_lookup_module.parse_kev_term(term, plugin_name='fake', first_unqualified=dup_key) + display.deprecated.assert_called_once_with(expected_msg, removed_in) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/base/test_hashi_vault_plugin.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/base/test_hashi_vault_plugin.py new file mode 100644 index 000000000..7922e6437 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/base/test_hashi_vault_plugin.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible.plugins import AnsiblePlugin + +from ansible_collections.community.hashi_vault.plugins.plugin_utils._hashi_vault_plugin import HashiVaultPlugin +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultOptionAdapter + + +@pytest.fixture +def hashi_vault_plugin(): + return HashiVaultPlugin() + + +class TestHashiVaultPlugin(object): + + def test_is_ansible_plugin(self, hashi_vault_plugin): + assert issubclass(type(hashi_vault_plugin), AnsiblePlugin) + + def test_has_option_adapter(self, hashi_vault_plugin): + assert hasattr(hashi_vault_plugin, '_options_adapter') and issubclass(type(hashi_vault_plugin._options_adapter), HashiVaultOptionAdapter) + + # TODO: remove when deprecate() is no longer needed + def test_has_process_deprecations(self, hashi_vault_plugin): + assert hasattr(hashi_vault_plugin, 'process_deprecations') and callable(hashi_vault_plugin.process_deprecations) diff --git 
a/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/option_adapter/conftest.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/option_adapter/conftest.py new file mode 100644 index 000000000..5c1aa7ed6 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/option_adapter/conftest.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +# this file must define the "adapter" fixture at a minimum, +# and anything else that it needs or depends on that isn't already defined in the test files themselves. + +# Keep in mind that this one is for plugin_utils and so it can depend on +# or import controller-side code, however it will only be run against python versions +# that are supported on the controller. + +import pytest + +from ansible.plugins import AnsiblePlugin + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultOptionAdapter + + +class FakePlugin(AnsiblePlugin): + _load_name = 'community.hashi_vault.fake' + + +@pytest.fixture +def ansible_plugin(sample_dict): + plugin = FakePlugin() + plugin._options = sample_dict + return plugin + + +@pytest.fixture +def adapter_from_ansible_plugin(ansible_plugin): + def _create_adapter_from_ansible_plugin(): + return HashiVaultOptionAdapter.from_ansible_plugin(ansible_plugin) + + return _create_adapter_from_ansible_plugin + + +@pytest.fixture(params=['dict', 'dict_defaults', 'ansible_plugin']) +def adapter(request, adapter_from_dict, adapter_from_dict_defaults, adapter_from_ansible_plugin): + return { + 'dict': adapter_from_dict, + 'dict_defaults': adapter_from_dict_defaults, + 'ansible_plugin': adapter_from_ansible_plugin, + }[request.param]() diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/option_adapter/test_hashi_vault_option_adapter.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/option_adapter/test_hashi_vault_option_adapter.py new file mode 100644 index 000000000..e78feec17 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/option_adapter/test_hashi_vault_option_adapter.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +# this file is here just to run the exact same tests as written in the imported file, with the main difference +# being the fixtures defined in conftest.py (this version can run tests that rely on controller-side code) +# and the supported python versions being different. +# So we really do want to import * and so we disable lint failure on wildcard imports. 
+# +# pylint: disable=wildcard-import,unused-wildcard-import +from ansible_collections.community.hashi_vault.tests.unit.plugins.module_utils.option_adapter.test_hashi_vault_option_adapter import * diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/test_hashi_vault_common_stringify.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/test_hashi_vault_common_stringify.py new file mode 100644 index 000000000..1fcd3fd5e --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/test_hashi_vault_common_stringify.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible.utils.unsafe_proxy import AnsibleUnsafe, AnsibleUnsafeBytes, AnsibleUnsafeText + +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import _stringify + + +@pytest.fixture +def uvalue(): + return u'fake123' + + +@pytest.fixture +def bvalue(): + return b'fake456' + + +class TestHashiVaultCommonStringify(object): + @pytest.mark.parametrize('unsafe', [True, False]) + def test_stringify_bytes(self, unsafe, bvalue): + token = bvalue + if unsafe: + token = AnsibleUnsafeBytes(token) + + r = _stringify(token) + + assert isinstance(r, bytes) + assert not isinstance(r, AnsibleUnsafe) + + @pytest.mark.parametrize('unsafe', [True, False]) + def test_stringify_unicode(self, unsafe, uvalue): + token = uvalue + utype = type(token) + if unsafe: + token = AnsibleUnsafeText(token) + + r = _stringify(token) + + assert isinstance(r, utype) + assert not isinstance(r, AnsibleUnsafe) diff --git a/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/test_hashi_vault_helper.py b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/test_hashi_vault_helper.py new file mode 100644 index 000000000..e71e625c0 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/plugins/plugin_utils/test_hashi_vault_helper.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Brian Scholer (@briantist) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from ansible.utils.unsafe_proxy import AnsibleUnsafeBytes, AnsibleUnsafeText + +from ansible_collections.community.hashi_vault.tests.unit.compat import mock +from ansible_collections.community.hashi_vault.plugins.module_utils._hashi_vault_common import HashiVaultHelper + + +@pytest.fixture +def hashi_vault_helper(): + return HashiVaultHelper() + + +@pytest.fixture +def expected_stringify_candidates(): + return set([ + 'token', + 'namespace', + ]) + + +class TestHashiVaultHelper(object): + def test_expected_stringify_candidates(self, hashi_vault_helper, expected_stringify_candidates): + # If we add more candidates to the set without updating the tests, + # this will help us catch that. The purpose is not to simply update + # the set in the fixture, but to also add specific tests where appropriate. 
+ assert hashi_vault_helper.STRINGIFY_CANDIDATES == expected_stringify_candidates, '%r' % ( + hashi_vault_helper.STRINGIFY_CANDIDATES ^ expected_stringify_candidates + ) + + @pytest.mark.parametrize('input', [b'one', u'two', AnsibleUnsafeBytes(b'three'), AnsibleUnsafeText(u'four')]) + @pytest.mark.parametrize('stringify', [True, False]) + def test_get_vault_client_stringify(self, hashi_vault_helper, expected_stringify_candidates, input, stringify): + kwargs = { + '__no_candidate': AnsibleUnsafeText(u'value'), + } + expected_calls = [] + for k in expected_stringify_candidates: + v = '%s_%s' % (k, input) + kwargs[k] = v + if stringify: + expected_calls.append(mock.call(v)) + + wrapper = mock.Mock(wraps=hashi_vault_helper._stringify) + with mock.patch('hvac.Client'): + with mock.patch.object(hashi_vault_helper, '_stringify', wrapper): + hashi_vault_helper.get_vault_client(hashi_vault_stringify_args=stringify, **kwargs) + + assert wrapper.call_count == len(expected_calls) + wrapper.assert_has_calls(expected_calls) diff --git a/ansible_collections/community/hashi_vault/tests/unit/requirements.txt b/ansible_collections/community/hashi_vault/tests/unit/requirements.txt new file mode 100644 index 000000000..d8844e35b --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/unit/requirements.txt @@ -0,0 +1,15 @@ +# the collection supports python 3.6 and higher, however the constraints for +# earlier python versions are still needed for Ansible < 2.12 which doesn't +# support tests/config.yml, so that unit tests (which will be skipped) won't +# choke on installing requirements. +hvac >= 0.10.6, != 0.10.12, != 0.10.13, < 1.0.0 ; python_version == '2.7' # bugs in 0.10.12 and 0.10.13 prevent it from working in Python 2 +hvac >= 0.10.6, < 1.0.0 ; python_version == '3.5' # py3.5 support will be dropped in 1.0.0 +hvac >= 0.10.6 ; python_version >= '3.6' + +# these should be satisfied naturally by the requests versions required by hvac anyway +urllib3 >= 1.15 ; python_version >= '3.6' # we need raise_on_status for retry support to raise the correct exceptions https://github.com/urllib3/urllib3/blob/main/CHANGES.rst#115-2016-04-06 +urllib3 >= 1.15, <2.0.0 ; python_version < '3.6' # https://urllib3.readthedocs.io/en/latest/v2-roadmap.html#optimized-for-python-3-6 + +# azure-identity 1.7.0 depends on cryptography 2.5 which drops python 2.6 support +azure-identity < 1.7.0; python_version < '2.7' +azure-identity; python_version >= '2.7' diff --git a/ansible_collections/community/hashi_vault/tests/utils/constraints.txt b/ansible_collections/community/hashi_vault/tests/utils/constraints.txt new file mode 100644 index 000000000..f0efeb2e8 --- /dev/null +++ b/ansible_collections/community/hashi_vault/tests/utils/constraints.txt @@ -0,0 +1,62 @@ +coverage >= 4.2, < 5.0.0, != 4.3.2 ; python_version <= '3.7' # features in 4.2+ required, avoid known bug in 4.3.2 on python 2.6, coverage 5.0+ incompatible +coverage >= 4.5.4, < 5.0.0 ; python_version > '3.7' # coverage had a bug in < 4.5.4 that would cause unit tests to hang in Python 3.8, coverage 5.0+ incompatible +cryptography < 2.2 ; python_version < '2.7' # cryptography 2.2 drops support for python 2.6 +deepdiff < 4.0.0 ; python_version < '3' # deepdiff 4.0.0 and later require python 3 +jinja2 < 2.11 ; python_version < '2.7' # jinja2 2.11 and later require python 2.7 or later +urllib3 < 1.24 ; python_version < '2.7' # urllib3 1.24 and later require python 2.7 or later +pywinrm >= 0.3.0 # message encryption support +sphinx < 1.6 ; python_version < '2.7' # 
sphinx 1.6 and later require python 2.7 or later +sphinx < 1.8 ; python_version >= '2.7' # sphinx 1.8 and later are currently incompatible with rstcheck 3.3 +pygments >= 2.4.0 # Pygments 2.4.0 includes bugfixes for YAML and YAML+Jinja lexers +wheel < 0.30.0 ; python_version < '2.7' # wheel 0.30.0 and later require python 2.7 or later +yamllint != 1.8.0, < 1.14.0 ; python_version < '2.7' # yamllint 1.8.0 and 1.14.0+ require python 2.7+ +pycrypto >= 2.6 # Need features found in 2.6 and greater +ncclient >= 0.5.2 # Need features added in 0.5.2 and greater +idna < 2.6, >= 2.5 # linode requires idna < 2.9, >= 2.5, requests requires idna < 2.6, but cryptography will cause the latest version to be installed instead +paramiko < 2.4.0 ; python_version < '2.7' # paramiko 2.4.0 drops support for python 2.6 +pytest < 3.3.0 ; python_version < '2.7' # pytest 3.3.0 drops support for python 2.6 +pytest < 5.0.0 ; python_version == '2.7' # pytest 5.0.0 and later will no longer support python 2.7 +pytest-forked < 1.0.2 ; python_version < '2.7' # pytest-forked 1.0.2 and later require python 2.7 or later +pytest-forked >= 1.0.2 ; python_version >= '2.7' # pytest-forked before 1.0.2 does not work with pytest 4.2.0+ (which requires python 2.7+) +ntlm-auth >= 1.3.0 # message encryption support using cryptography +requests < 2.20.0 ; python_version < '2.7' # requests 2.20.0 drops support for python 2.6 +requests-ntlm >= 1.1.0 # message encryption support +requests-credssp >= 0.1.0 # message encryption support +voluptuous >= 0.11.0 # Schema recursion via Self +openshift >= 0.6.2, < 0.9.0 # merge_type support +virtualenv < 16.0.0 ; python_version < '2.7' # virtualenv 16.0.0 and later require python 2.7 or later +pathspec < 0.6.0 ; python_version < '2.7' # pathspec 0.6.0 and later require python 2.7 or later +pyopenssl < 18.0.0 ; python_version < '2.7' # pyOpenSSL 18.0.0 and later require python 2.7 or later +pyfmg == 0.6.1 # newer versions do not pass current unit tests +pyyaml < 5.1 ; python_version < '2.7' # pyyaml 5.1 and later require python 2.7 or later +pycparser < 2.19 ; python_version < '2.7' # pycparser 2.19 and later require python 2.7 or later +mock >= 2.0.0 # needed for features backported from Python 3.6 unittest.mock (assert_called, assert_called_once...) 
+pytest-mock >= 1.4.0 # needed for mock_use_standalone_module pytest option +xmltodict < 0.12.0 ; python_version < '2.7' # xmltodict 0.12.0 and later require python 2.7 or later +lxml < 4.3.0 ; python_version < '2.7' # lxml 4.3.0 and later require python 2.7 or later +pyvmomi < 6.0.0 ; python_version < '2.7' # pyvmomi 6.0.0 and later require python 2.7 or later +pyone == 1.1.9 # newer versions do not pass current integration tests +boto3 < 1.11 ; python_version < '2.7' # boto3 1.11 drops Python 2.6 support +botocore >= 1.10.0, < 1.14 ; python_version < '2.7' # adds support for the following AWS services: secretsmanager, fms, and acm-pca; botocore 1.14 drops Python 2.6 support +botocore >= 1.10.0 ; python_version >= '2.7' # adds support for the following AWS services: secretsmanager, fms, and acm-pca +setuptools < 45 ; python_version <= '2.7' # setuptools 45 and later require python 3.5 or later +cffi >= 1.14.2, != 1.14.3 # Yanked version which older versions of pip will still install: + +# freeze pylint and its requirements for consistent test results +astroid == 2.2.5 +isort == 4.3.15 +lazy-object-proxy == 1.3.1 +mccabe == 0.6.1 +pylint == 2.3.1 +typed-ast == 1.4.0 # 1.4.0 is required to compile on Python 3.8 +wrapt == 1.11.1 + +# hvac +hvac >= 0.10.6, != 0.10.12, != 0.10.13, < 1.0.0 ; python_version == '2.7' # bugs in 0.10.12 and 0.10.13 prevent it from working in Python 2 +hvac >= 0.10.6, < 1.0.0 ; python_version == '3.5' # py3.5 support will be dropped in 1.0.0 +hvac >= 0.10.6 ; python_version >= '3.6' + +# urllib3 +# these should be satisfied naturally by the requests versions required by hvac anyway +urllib3 >= 1.15 ; python_version >= '3.6' # we need raise_on_status for retry support to raise the correct exceptions https://github.com/urllib3/urllib3/blob/main/CHANGES.rst#115-2016-04-06 +urllib3 >= 1.15, <2.0.0 ; python_version < '3.6' # https://urllib3.readthedocs.io/en/latest/v2-roadmap.html#optimized-for-python-3-6 |