Diffstat (limited to 'collections-debian-merged/ansible_collections/community/azure/tests/utils')
8 files changed, 532 insertions, 0 deletions
diff --git a/collections-debian-merged/ansible_collections/community/azure/tests/utils/ado/ado.sh b/collections-debian-merged/ansible_collections/community/azure/tests/utils/ado/ado.sh
new file mode 100755
index 00000000..55d7f66d
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/azure/tests/utils/ado/ado.sh
@@ -0,0 +1,124 @@
+#!/usr/bin/env bash
+
+set -o pipefail -eux
+
+declare -a args
+IFS='/:' read -ra args <<< "$1"
+
+group="${args[0]}"
+
+command -v python
+python -V
+
+if [ "$2" = "2.7" ]
+then
+    command -v pip
+    pip --version
+    pip list --disable-pip-version-check
+else
+    command -v pip3
+    pip3 --version
+    pip3 list --disable-pip-version-check
+fi
+
+export PATH="${PWD}/bin:${PATH}"
+export PYTHONIOENCODING="UTF-8"
+export LC_ALL="en_US.utf-8"
+
+if [ "$2" = "2.7" ]
+then
+    pip install virtualenv
+    virtualenv --python /usr/bin/python2.7 ~/ansible-venv
+else
+    pip3 install virtualenv
+    virtualenv --python /usr/bin/python"$2" ~/ansible-venv
+fi
+
+set +ux
+. ~/ansible-venv/bin/activate
+set -ux
+
+if [ "$2" = "2.7" ]
+then
+    if [ "$3" = "devel" ]
+    then
+        pip install git+https://github.com/ansible/ansible.git@devel --disable-pip-version-check
+    else
+        git clone https://github.com/ansible/ansible.git
+        cd "ansible"
+        git checkout "stable-$3"
+        source hacking/env-setup
+        pip install paramiko PyYAML Jinja2 httplib2 six
+    fi
+else
+    if [ "$3" = "devel" ]
+    then
+        pip3 install git+https://github.com/ansible/ansible.git@devel --disable-pip-version-check
+    else
+        git clone https://github.com/ansible/ansible.git
+        cd "ansible"
+        git checkout "stable-$3"
+        source hacking/env-setup
+        pip3 install paramiko PyYAML Jinja2 httplib2 six
+    fi
+fi
+
+TEST_DIR="${HOME}/.ansible/ansible_collections/azure/azcollection"
+mkdir -p "${TEST_DIR}"
+cp -aT "${SHIPPABLE_BUILD_DIR}" "${TEST_DIR}"
+cd "${TEST_DIR}"
+mkdir -p shippable/testresults
+
+if [ "$2" = "2.7" ]
+then
+    pip install --upgrade pip
+    pip install -I -r "${TEST_DIR}/requirements-azure.txt"
+    pip3 install setuptools
+    pip3 install -I -r "${TEST_DIR}/sanity-requirements-azure.txt"
+    pip3 list
+else
+    pip3 install -I -r "${TEST_DIR}/requirements-azure.txt"
+    pip3 install -I -r "${TEST_DIR}/sanity-requirements-azure.txt"
+    pip3 list
+fi
+
+timeout=60
+
+if [ "$4" = "all" ]
+then
+    echo "All module need test"
+else
+    path_dir="${TEST_DIR}/tests/integration/targets/"
+    for item in "$path_dir"*
+    do
+        if [ "${item}" = "$path_dir""$4" ]
+        then
+            echo "PASS"
+        else
+            echo " " >> "${item}"/aliases
+            echo "disabled" >> "${item}"/aliases
+        fi
+    done
+fi
+echo '--------------------------------------------'
+ansible --version
+echo '--------------------------------------------'
+
+ansible-test env --dump --show --timeout "${timeout}" --color -v
+
+cat <<EOF >> "${TEST_DIR}"/tests/integration/cloud-config-azure.ini
+[default]
+AZURE_CLIENT_ID:${AZURE_CLIENT_ID}
+AZURE_SECRET:${AZURE_SECRET}
+AZURE_SUBSCRIPTION_ID:${AZURE_SUBSCRIPTION_ID}
+AZURE_TENANT:${AZURE_TENANT}
+RESOURCE_GROUP:${RESOURCE_GROUP}
+RESOURCE_GROUP_SECONDARY:${RESOURCE_GROUP_SECONDARY}
+EOF
+
+if [ "sanity" = "${group}" ]
+then
+    ansible-test sanity --color -v --junit
+else
+    ansible-test integration --color -v --retry-on-error "shippable/azure/group${group}/" --allow-destructive
+fi
diff --git a/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/azure.sh b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/azure.sh
new file mode 100755
index 00000000..d76c3228
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/azure.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+
+set -o pipefail -eux
+
+declare -a args
+IFS='/:' read -ra args <<< "$1"
+
+cloud="${args[0]}"
+python="${args[1]}"
+group="${args[2]}"
+
+target="shippable/${cloud}/group${group}/"
+
+stage="${S:-prod}"
+
+# shellcheck disable=SC2086
+ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
+    --remote-terminate always --remote-stage "${stage}" \
+    --docker --python "${python}"
diff --git a/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/check_matrix.py b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/check_matrix.py
new file mode 100755
index 00000000..fb559466
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/check_matrix.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python
+"""Verify the currently executing Shippable test matrix matches the one defined in the "shippable.yml" file."""
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import datetime
+import json
+import os
+import re
+import sys
+import time
+
+try:
+    from typing import NoReturn
+except ImportError:
+    NoReturn = None
+
+try:
+    # noinspection PyCompatibility
+    from urllib2 import urlopen  # pylint: disable=ansible-bad-import-from
+except ImportError:
+    # noinspection PyCompatibility
+    from urllib.request import urlopen
+
+
+def main():  # type: () -> None
+    """Main entry point."""
+    repo_full_name = os.environ['REPO_FULL_NAME']
+    required_repo_full_name = 'ansible-collections/community.azure'
+
+    if repo_full_name != required_repo_full_name:
+        sys.stderr.write('Skipping matrix check on repo "%s" which is not "%s".\n' % (repo_full_name, required_repo_full_name))
+        return
+
+    with open('shippable.yml', 'rb') as yaml_file:
+        yaml = yaml_file.read().decode('utf-8').splitlines()
+
+    defined_matrix = [match.group(1) for match in [re.search(r'^ *- env: T=(.*)$', line) for line in yaml] if match and match.group(1) != 'none']
+
+    if not defined_matrix:
+        fail('No matrix entries found in the "shippable.yml" file.',
+             'Did you modify the "shippable.yml" file?')
+
+    run_id = os.environ['SHIPPABLE_BUILD_ID']
+    sleep = 1
+    jobs = []
+
+    for attempts_remaining in range(4, -1, -1):
+        try:
+            jobs = json.loads(urlopen('https://api.shippable.com/jobs?runIds=%s' % run_id).read())
+
+            if not isinstance(jobs, list):
+                raise Exception('Shippable run %s data is not a list.' % run_id)
+
+            break
+        except Exception as ex:
+            if not attempts_remaining:
+                fail('Unable to retrieve Shippable run %s matrix.' % run_id,
+                     str(ex))
+
+            sys.stderr.write('Unable to retrieve Shippable run %s matrix: %s\n' % (run_id, ex))
+            sys.stderr.write('Trying again in %d seconds...\n' % sleep)
+            time.sleep(sleep)
+            sleep *= 2
+
+    if len(jobs) != len(defined_matrix):
+        if len(jobs) == 1:
+            hint = '\n\nMake sure you do not use the "Rebuild with SSH" option.'
+        else:
+            hint = ''
+
+        fail('Shippable run %s has %d jobs instead of the expected %d jobs.' % (run_id, len(jobs), len(defined_matrix)),
+             'Try re-running the entire matrix.%s' % hint)
+
+    actual_matrix = dict((job.get('jobNumber'), dict(tuple(line.split('=', 1)) for line in job.get('env', [])).get('T', '')) for job in jobs)
+    errors = [(job_number, test, actual_matrix.get(job_number)) for job_number, test in enumerate(defined_matrix, 1) if actual_matrix.get(job_number) != test]
+
+    if len(errors):
+        error_summary = '\n'.join('Job %s expected "%s" but found "%s" instead.' % (job_number, expected, actual) for job_number, expected, actual in errors)
+
+        fail('Shippable run %s has a job matrix mismatch.' % run_id,
+             'Try re-running the entire matrix.\n\n%s' % error_summary)
+
+
+def fail(message, output):  # type: (str, str) -> NoReturn
+    # Include a leading newline to improve readability on Shippable "Tests" tab.
+    # Without this, the first line becomes indented.
+    output = '\n' + output.strip()
+
+    timestamp = datetime.datetime.utcnow().replace(microsecond=0).isoformat()
+
+    # hack to avoid requiring junit-xml, which isn't pre-installed on Shippable outside our test containers
+    xml = '''
+<?xml version="1.0" encoding="utf-8"?>
+<testsuites disabled="0" errors="1" failures="0" tests="1" time="0.0">
+\t<testsuite disabled="0" errors="1" failures="0" file="None" log="None" name="ansible-test" skipped="0" tests="1" time="0" timestamp="%s" url="None">
+\t\t<testcase classname="timeout" name="timeout">
+\t\t\t<error message="%s" type="error">%s</error>
+\t\t</testcase>
+\t</testsuite>
+</testsuites>
+''' % (timestamp, message, output)
+
+    path = 'shippable/testresults/check-matrix.xml'
+    dir_path = os.path.dirname(path)
+
+    if not os.path.exists(dir_path):
+        os.makedirs(dir_path)
+
+    with open(path, 'w') as junit_fd:
+        junit_fd.write(xml.lstrip())
+
+    sys.stderr.write(message + '\n')
+    sys.stderr.write(output + '\n')
+
+    sys.exit(1)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/cloud.sh b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/cloud.sh
new file mode 100755
index 00000000..d76c3228
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/cloud.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+
+set -o pipefail -eux
+
+declare -a args
+IFS='/:' read -ra args <<< "$1"
+
+cloud="${args[0]}"
+python="${args[1]}"
+group="${args[2]}"
+
+target="shippable/${cloud}/group${group}/"
+
+stage="${S:-prod}"
+
+# shellcheck disable=SC2086
+ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
+    --remote-terminate always --remote-stage "${stage}" \
+    --docker --python "${python}"
diff --git a/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/sanity.sh b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/sanity.sh
new file mode 100755
index 00000000..c216220e
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/sanity.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+
+set -o pipefail -eux
+
+declare -a args
+IFS='/:' read -ra args <<< "$1"
+
+group="${args[1]}"
+
+if [ "${BASE_BRANCH:-}" ]; then
+    base_branch="origin/${BASE_BRANCH}"
+else
+    base_branch=""
+fi
+
+if [ "${group}" == "extra" ]; then
+    # ansible-galaxy -vvv collection install community.internal_test_tools
+    git clone --single-branch --depth 1 https://github.com/ansible-collections/community.internal_test_tools.git ../internal_test_tools
+
+    ../internal_test_tools/tools/run.py --color
+    exit
+fi
+
+# shellcheck disable=SC2086
+ansible-test sanity --color -v --junit ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
+    --docker --base-branch "${base_branch}" \
+    --allow-disabled
diff --git a/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/shippable.sh b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/shippable.sh
new file mode 100755
index 00000000..1f9672d4
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/shippable.sh
@@ -0,0 +1,202 @@
+#!/usr/bin/env bash
+
+set -o pipefail -eux
+
+declare -a args
+IFS='/:' read -ra args <<< "$1"
+
+ansible_version="${args[0]}"
+script="${args[1]}"
+
+function join {
+    local IFS="$1";
+    shift;
+    echo "$*";
+}
+
+test="$(join / "${args[@]:1}")"
+
+docker images ansible/ansible
+docker images quay.io/ansible/*
+docker ps
+
+for container in $(docker ps --format '{{.Image}} {{.ID}}' | grep -v '^drydock/' | sed 's/^.* //'); do
+    docker rm -f "${container}" || true  # ignore errors
+done
+
+docker ps
+
+if [ -d /home/shippable/cache/ ]; then
+    ls -la /home/shippable/cache/
+fi
+
+command -v python
+python -V
+
+function retry
+{
+    # shellcheck disable=SC2034
+    for repetition in 1 2 3; do
+        set +e
+        "$@"
+        result=$?
+        set -e
+        if [ ${result} == 0 ]; then
+            return ${result}
+        fi
+        echo "@* -> ${result}"
+    done
+    echo "Command '@*' failed 3 times!"
+    exit -1
+}
+
+command -v pip
+pip --version
+pip list --disable-pip-version-check
+if [ "${ansible_version}" == "devel" ]; then
+    retry pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
+else
+    retry pip install "https://github.com/ansible/ansible/archive/stable-${ansible_version}.tar.gz" --disable-pip-version-check
+fi
+
+export ANSIBLE_COLLECTIONS_PATHS="${HOME}/.ansible"
+SHIPPABLE_RESULT_DIR="$(pwd)/shippable"
+TEST_DIR="${ANSIBLE_COLLECTIONS_PATHS}/ansible_collections/community/azure"
+mkdir -p "${TEST_DIR}"
+cp -aT "${SHIPPABLE_BUILD_DIR}" "${TEST_DIR}"
+cd "${TEST_DIR}"
+
+# START: HACK install dependencies
+retry ansible-galaxy -vvv collection install azure.azcollection
+
+# END: HACK
+
+export PYTHONIOENCODING='utf-8'
+
+if [ "${JOB_TRIGGERED_BY_NAME:-}" == "nightly-trigger" ]; then
+    COVERAGE=yes
+    COMPLETE=yes
+fi
+
+if [ -n "${COVERAGE:-}" ]; then
+    # on-demand coverage reporting triggered by setting the COVERAGE environment variable to a non-empty value
+    export COVERAGE="--coverage"
+elif [[ "${COMMIT_MESSAGE}" =~ ci_coverage ]]; then
+    # on-demand coverage reporting triggered by having 'ci_coverage' in the latest commit message
+    export COVERAGE="--coverage"
+else
+    # on-demand coverage reporting disabled (default behavior, always-on coverage reporting remains enabled)
+    export COVERAGE="--coverage-check"
+fi
+
+if [ -n "${COMPLETE:-}" ]; then
+    # disable change detection triggered by setting the COMPLETE environment variable to a non-empty value
+    export CHANGED=""
+elif [[ "${COMMIT_MESSAGE}" =~ ci_complete ]]; then
+    # disable change detection triggered by having 'ci_complete' in the latest commit message
+    export CHANGED=""
+else
+    # enable change detection (default behavior)
+    export CHANGED="--changed"
+fi
+
+if [ "${IS_PULL_REQUEST:-}" == "true" ]; then
+    # run unstable tests which are targeted by focused changes on PRs
+    export UNSTABLE="--allow-unstable-changed"
+else
+    # do not run unstable tests outside PRs
+    export UNSTABLE=""
+fi
+
+# remove empty core/extras module directories from PRs created prior to the repo-merge
+find plugins -type d -empty -print -delete
+
+function cleanup
+{
+    # for complete on-demand coverage generate a report for all files with no coverage on the "sanity/5" job so we only have one copy
+    if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ] && [ "${test}" == "sanity/5" ]; then
+        stub="--stub"
+        # trigger coverage reporting for stubs even if no other coverage data exists
+        mkdir -p tests/output/coverage/
+    else
+        stub=""
+    fi
+
+    if [ -d tests/output/coverage/ ]; then
+        if find tests/output/coverage/ -mindepth 1 -name '.*' -prune -o -print -quit | grep -q .; then
+            process_coverage='yes'  # process existing coverage files
+        elif [ "${stub}" ]; then
+            process_coverage='yes'  # process coverage when stubs are enabled
+        else
+            process_coverage=''
+        fi
+
+        if [ "${process_coverage}" ]; then
+            # use python 3.7 for coverage to avoid running out of memory during coverage xml processing
+            # only use it for coverage to avoid the additional overhead of setting up a virtual environment for a potential no-op job
+            virtualenv --python /usr/bin/python3.7 ~/ansible-venv
+            set +ux
+            . ~/ansible-venv/bin/activate
+            set -ux
+
+            # shellcheck disable=SC2086
+            ansible-test coverage xml --color -v --requirements --group-by command --group-by version ${stub:+"$stub"}
+            cp -a tests/output/reports/coverage=*.xml "$SHIPPABLE_RESULT_DIR/codecoverage/"
+
+            if [ "${ansible_version}" != "2.9" ]; then
+                # analyze and capture code coverage aggregated by integration test target
+                ansible-test coverage analyze targets generate -v "$SHIPPABLE_RESULT_DIR/testresults/coverage-analyze-targets.json"
+            fi
+
+            # upload coverage report to codecov.io only when using complete on-demand coverage
+            if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ]; then
+                for file in tests/output/reports/coverage=*.xml; do
+                    flags="${file##*/coverage=}"
+                    flags="${flags%-powershell.xml}"
+                    flags="${flags%.xml}"
+                    # remove numbered component from stub files when converting to tags
+                    flags="${flags//stub-[0-9]*/stub}"
+                    flags="${flags//=/,}"
+                    flags="${flags//[^a-zA-Z0-9_,]/_}"
+
+                    bash <(curl -s https://codecov.io/bash) \
+                        -f "${file}" \
+                        -F "${flags}" \
+                        -n "${test}" \
+                        -t 47041dbd-4bef-43b8-8873-4c6a1a8a3711 \
+                        -X coveragepy \
+                        -X gcov \
+                        -X fix \
+                        -X search \
+                        -X xcode \
+                        || echo "Failed to upload code coverage report to codecov.io: ${file}"
+                done
+            fi
+        fi
+    fi
+
+    if [ -d tests/output/junit/ ]; then
+        cp -aT tests/output/junit/ "$SHIPPABLE_RESULT_DIR/testresults/"
+    fi
+
+    if [ -d tests/output/data/ ]; then
+        cp -a tests/output/data/ "$SHIPPABLE_RESULT_DIR/testresults/"
+    fi
+
+    if [ -d tests/output/bot/ ]; then
+        cp -aT tests/output/bot/ "$SHIPPABLE_RESULT_DIR/testresults/"
+    fi
+}
+
+trap cleanup EXIT
+
+if [[ "${COVERAGE:-}" == "--coverage" ]]; then
+    timeout=60
+else
+    timeout=50
+fi
+
+ansible-test env --dump --show --timeout "${timeout}" --color -v
+
+"tests/utils/shippable/check_matrix.py"
+"tests/utils/shippable/${script}.sh" "${test}"
diff --git a/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/timing.py b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/timing.py
new file mode 100755
index 00000000..fb538271
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/timing.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python3.7
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import sys
+import time
+
+start = time.time()
+
+sys.stdin.reconfigure(errors='surrogateescape')
+sys.stdout.reconfigure(errors='surrogateescape')
+
+for line in sys.stdin:
+    seconds = time.time() - start
+    sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
+    sys.stdout.flush()
diff --git a/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/timing.sh b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/timing.sh
new file mode 100755
index 00000000..77e25783
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/azure/tests/utils/shippable/timing.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+set -o pipefail -eu
+
+"$@" 2>&1 | "$(dirname "$0")/timing.py"