Diffstat (limited to 'collections-debian-merged/ansible_collections/community/network/.azure-pipelines')
12 files changed, 599 insertions, 0 deletions
diff --git a/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/README.md b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/README.md
new file mode 100644
index 00000000..385e70ba
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/README.md
@@ -0,0 +1,3 @@
+## Azure Pipelines Configuration
+
+Please see the [Documentation](https://github.com/ansible/community/wiki/Testing:-Azure-Pipelines) for more information.
diff --git a/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/azure-pipelines.yml b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/azure-pipelines.yml
new file mode 100644
index 00000000..5948ce7c
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/azure-pipelines.yml
@@ -0,0 +1,252 @@
+trigger:
+  batch: true
+  branches:
+    include:
+      - main
+      - stable-*
+
+pr:
+  autoCancel: true
+  branches:
+    include:
+      - main
+      - stable-*
+
+schedules:
+  - cron: 0 9 * * *
+    displayName: Nightly
+    always: true
+    branches:
+      include:
+        - main
+        - stable-*
+
+variables:
+  - name: checkoutPath
+    value: ansible_collections/community/network
+  - name: coverageBranches
+    value: main
+  - name: pipelinesCoverage
+    value: coverage
+  - name: entryPoint
+    value: tests/utils/shippable/shippable.sh
+  - name: fetchDepth
+    value: 0
+
+resources:
+  containers:
+    - container: default
+      image: quay.io/ansible/azure-pipelines-test-container:1.7.1
+
+pool: Standard
+
+stages:
+### Sanity
+  - stage: Sanity_devel
+    displayName: Sanity devel
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          nameFormat: Test {0}
+          testFormat: devel/sanity/{0}
+          targets:
+            - test: 1
+            - test: 2
+            - test: 3
+            - test: 4
+            - test: 5
+            - test: extra
+  - stage: Sanity_2_10
+    displayName: Sanity 2.10
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          nameFormat: Test {0}
+          testFormat: 2.10/sanity/{0}
+          targets:
+            - test: 1
+            - test: 2
+            - test: 3
+            - test: 4
+            - test: 5
+  - stage: Sanity_2_9
+    displayName: Sanity 2.9
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          nameFormat: Test {0}
+          testFormat: 2.9/sanity/{0}
+          targets:
+            - test: 1
+            - test: 2
+            - test: 3
+            - test: 4
+            - test: 5
+### Units
+  - stage: Units_devel
+    displayName: Units devel
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          nameFormat: Python {0}
+          testFormat: devel/units/{0}/1
+          targets:
+            - test: 2.6
+            - test: 2.7
+            - test: 3.5
+            - test: 3.6
+            - test: 3.7
+            - test: 3.8
+            - test: 3.9
+  - stage: Units_2_10
+    displayName: Units 2.10
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          nameFormat: Python {0}
+          testFormat: 2.10/units/{0}/1
+          targets:
+            - test: 2.6
+            - test: 2.7
+            - test: 3.5
+            - test: 3.6
+            - test: 3.7
+            - test: 3.8
+            - test: 3.9
+  - stage: Units_2_9
+    displayName: Units 2.9
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          nameFormat: Python {0}
+          testFormat: 2.9/units/{0}/1
+          targets:
+            - test: 2.6
+            - test: 2.7
+            - test: 3.5
+            - test: 3.6
+            - test: 3.7
+            - test: 3.8
+### Docker
+  - stage: Docker_devel
+    displayName: Docker devel
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: devel/linux/{0}/1
+          targets:
+            - name: CentOS 6
+              test: centos6
+            - name: CentOS 7
+              test: centos7
+            - name: CentOS 8
+              test: centos8
+            - name: Fedora 32
+              test: fedora32
+            - name: Fedora 33
+              test: fedora33
+            - name: openSUSE 15 py2
+              test: opensuse15py2
+            - name: openSUSE 15 py3
+              test: opensuse15
+            - name: Ubuntu 16.04
+              test: ubuntu1604
+            - name: Ubuntu 18.04
+              test: ubuntu1804
+  - stage: Docker_2_10
+    displayName: Docker 2.10
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: 2.10/linux/{0}/1
+          targets:
+            - name: CentOS 8
+              test: centos8
+            - name: Fedora 32
+              test: fedora32
+            - name: openSUSE 15 py3
+              test: opensuse15
+            - name: Ubuntu 18.04
+              test: ubuntu1804
+
+  - stage: Docker_2_9
+    displayName: Docker 2.9
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: 2.9/linux/{0}/1
+          targets:
+            - name: CentOS 7
+              test: centos7
+            - name: Fedora 31
+              test: fedora31
+            - name: openSUSE 15 py2
+              test: opensuse15py2
+            - name: Ubuntu 16.04
+              test: ubuntu1604
+
+### Remote
+  - stage: Remote_devel
+    displayName: Remote devel
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: devel/{0}/1
+          targets:
+            - name: RHEL 8.2
+              test: rhel/8.2
+            - name: FreeBSD 11.1
+              test: freebsd/11.1
+            - name: FreeBSD 12.1
+              test: freebsd/12.1
+  - stage: Remote_2_10
+    displayName: Remote 2.10
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: 2.10/{0}/1
+          targets:
+            - name: RHEL 8.2
+              test: rhel/8.2
+            - name: FreeBSD 12.1
+              test: freebsd/12.1
+  - stage: Remote_2_9
+    displayName: Remote 2.9
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: 2.9/{0}/1
+          targets:
+            - name: RHEL 8.1
+              test: rhel/8.1
+            - name: FreeBSD 11.1
+              test: freebsd/11.1
+  - stage: Summary
+    condition: succeededOrFailed()
+    dependsOn:
+      - Sanity_devel
+      - Sanity_2_10
+      - Sanity_2_9
+      - Units_devel
+      - Units_2_10
+      - Units_2_9
+      - Remote_devel
+      - Docker_devel
+      - Remote_2_10
+      - Docker_2_10
+      - Remote_2_9
+      - Docker_2_9
+    jobs:
+      - template: templates/coverage.yml
diff --git a/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/aggregate-coverage.sh b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/aggregate-coverage.sh
new file mode 100755
index 00000000..f3113dd0
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/aggregate-coverage.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+# Aggregate code coverage results for later processing.
+
+set -o pipefail -eu
+
+agent_temp_directory="$1"
+
+PATH="${PWD}/bin:${PATH}"
+
+mkdir "${agent_temp_directory}/coverage/"
+
+options=(--venv --venv-system-site-packages --color -v)
+
+ansible-test coverage combine --export "${agent_temp_directory}/coverage/" "${options[@]}"
+
+if ansible-test coverage analyze targets generate --help >/dev/null 2>&1; then
+    # Only analyze coverage if the installed version of ansible-test supports it.
+    # Doing so allows this script to work unmodified for multiple Ansible versions.
+    ansible-test coverage analyze targets generate "${agent_temp_directory}/coverage/coverage-analyze-targets.json" "${options[@]}"
+fi
diff --git a/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/combine-coverage.py b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/combine-coverage.py
new file mode 100755
index 00000000..506ade64
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/combine-coverage.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+"""
+Combine coverage data from multiple jobs, keeping the data only from the most recent attempt from each job.
+Coverage artifacts must be named using the format: "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}"
+The recommended coverage artifact name format is: Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)
+Keep in mind that Azure Pipelines does not enforce unique job display names (only names).
+It is up to pipeline authors to avoid name collisions when deviating from the recommended format.
+"""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import re
+import shutil
+import sys
+
+
+def main():
+    """Main program entry point."""
+    source_directory = sys.argv[1]
+
+    if '/ansible_collections/' in os.getcwd():
+        output_path = "tests/output"
+    else:
+        output_path = "test/results"
+
+    destination_directory = os.path.join(output_path, 'coverage')
+
+    if not os.path.exists(destination_directory):
+        os.makedirs(destination_directory)
+
+    jobs = {}
+    count = 0
+
+    for name in os.listdir(source_directory):
+        match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
+        label = match.group('label')
+        attempt = int(match.group('attempt'))
+        jobs[label] = max(attempt, jobs.get(label, 0))
+
+    for label, attempt in jobs.items():
+        name = 'Coverage {attempt} {label}'.format(label=label, attempt=attempt)
+        source = os.path.join(source_directory, name)
+        source_files = os.listdir(source)
+
+        for source_file in source_files:
+            source_path = os.path.join(source, source_file)
+            destination_path = os.path.join(destination_directory, source_file + '.' + label)
+            print('"%s" -> "%s"' % (source_path, destination_path))
+            shutil.copyfile(source_path, destination_path)
+            count += 1
+
+    print('Coverage file count: %d' % count)
+    print('##vso[task.setVariable variable=coverageFileCount]%d' % count)
+    print('##vso[task.setVariable variable=outputPath]%s' % output_path)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/process-results.sh b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/process-results.sh
new file mode 100755
index 00000000..f3f1d1ba
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/process-results.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+# Check the test results and set variables for use in later steps.
+
+set -o pipefail -eu
+
+if [[ "$PWD" =~ /ansible_collections/ ]]; then
+    output_path="tests/output"
+else
+    output_path="test/results"
+fi
+
+echo "##vso[task.setVariable variable=outputPath]${output_path}"
+
+if compgen -G "${output_path}"'/junit/*.xml' > /dev/null; then
+    echo "##vso[task.setVariable variable=haveTestResults]true"
+fi
+
+if compgen -G "${output_path}"'/bot/ansible-test-*' > /dev/null; then
+    echo "##vso[task.setVariable variable=haveBotResults]true"
+fi
+
+if compgen -G "${output_path}"'/coverage/*' > /dev/null; then
+    echo "##vso[task.setVariable variable=haveCoverageData]true"
+fi
diff --git a/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/publish-codecov.sh b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/publish-codecov.sh
new file mode 100755
index 00000000..7aeabda0
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/publish-codecov.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+# Upload code coverage reports to codecov.io.
+# Multiple coverage files from multiple languages are accepted and aggregated after upload.
+# Python coverage, as well as PowerShell and Python stubs can all be uploaded.
+
+set -o pipefail -eu
+
+output_path="$1"
+
+curl --silent --show-error https://codecov.io/bash > codecov.sh
+
+for file in "${output_path}"/reports/coverage*.xml; do
+    name="${file}"
+    name="${name##*/}" # remove path
+    name="${name##coverage=}" # remove 'coverage=' prefix if present
+    name="${name%.xml}" # remove '.xml' suffix
+
+    bash codecov.sh \
+        -f "${file}" \
+        -n "${name}" \
+        -X coveragepy \
+        -X gcov \
+        -X fix \
+        -X search \
+        -X xcode \
+        || echo "Failed to upload code coverage report to codecov.io: ${file}"
+done
diff --git a/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/report-coverage.sh b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/report-coverage.sh
new file mode 100755
index 00000000..1bd91bdc
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/report-coverage.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+# Generate code coverage reports for uploading to Azure Pipelines and codecov.io.
+
+set -o pipefail -eu
+
+PATH="${PWD}/bin:${PATH}"
+
+if ! ansible-test --help >/dev/null 2>&1; then
+    # Install the devel version of ansible-test for generating code coverage reports.
+    # This is only used by Ansible Collections, which are typically tested against multiple Ansible versions (in separate jobs).
+    # Since a version of ansible-test is required that can work with the output from multiple older releases, the devel version is used.
+    pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
+fi
+
+ansible-test coverage xml --stub --venv --venv-system-site-packages --color -v
diff --git a/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/run-tests.sh b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/run-tests.sh
new file mode 100755
index 00000000..a947fdf0
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/run-tests.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+# Configure the test environment and run the tests.
+
+set -o pipefail -eu
+
+entry_point="$1"
+test="$2"
+read -r -a coverage_branches <<< "$3" # space separated list of branches to run code coverage on for scheduled builds
+
+export COMMIT_MESSAGE
+export COMPLETE
+export COVERAGE
+export IS_PULL_REQUEST
+
+if [ "${SYSTEM_PULLREQUEST_TARGETBRANCH:-}" ]; then
+    IS_PULL_REQUEST=true
+    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD^2)
+else
+    IS_PULL_REQUEST=
+    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD)
+fi
+
+COMPLETE=
+COVERAGE=
+
+if [ "${BUILD_REASON}" = "Schedule" ]; then
+    COMPLETE=yes
+
+    if printf '%s\n' "${coverage_branches[@]}" | grep -q "^${BUILD_SOURCEBRANCHNAME}$"; then
+        COVERAGE=yes
+    fi
+fi
+
+"${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"
diff --git a/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/time-command.py b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/time-command.py
new file mode 100755
index 00000000..5e8eb8d4
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/scripts/time-command.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+"""Prepends a relative timestamp to each input line from stdin and writes it to stdout."""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import sys
+import time
+
+
+def main():
+    """Main program entry point."""
+    start = time.time()
+
+    sys.stdin.reconfigure(errors='surrogateescape')
+    sys.stdout.reconfigure(errors='surrogateescape')
+
+    for line in sys.stdin:
+        seconds = time.time() - start
+        sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
+        sys.stdout.flush()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/templates/coverage.yml b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/templates/coverage.yml
new file mode 100644
index 00000000..1864e444
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/templates/coverage.yml
@@ -0,0 +1,39 @@
+# This template adds a job for processing code coverage data.
+# It will upload results to Azure Pipelines and codecov.io.
+# Use it from a job stage that completes after all other jobs have completed.
+# This can be done by placing it in a separate summary stage that runs after the test stage(s) have completed.
+
+jobs:
+  - job: Coverage
+    displayName: Code Coverage
+    container: default
+    workspace:
+      clean: all
+    steps:
+      - checkout: self
+        fetchDepth: $(fetchDepth)
+        path: $(checkoutPath)
+      - task: DownloadPipelineArtifact@2
+        displayName: Download Coverage Data
+        inputs:
+          path: coverage/
+          patterns: "Coverage */*=coverage.combined"
+      - bash: .azure-pipelines/scripts/combine-coverage.py coverage/
+        displayName: Combine Coverage Data
+      - bash: .azure-pipelines/scripts/report-coverage.sh
+        displayName: Generate Coverage Report
+        condition: gt(variables.coverageFileCount, 0)
+      - task: PublishCodeCoverageResults@1
+        inputs:
+          codeCoverageTool: Cobertura
+          # Azure Pipelines only accepts a single coverage data file.
+          # That means only Python or PowerShell coverage can be uploaded, but not both.
+          # Set the "pipelinesCoverage" variable to determine which type is uploaded.
+          # Use "coverage" for Python and "coverage-powershell" for PowerShell.
+          summaryFileLocation: "$(outputPath)/reports/$(pipelinesCoverage).xml"
+        displayName: Publish to Azure Pipelines
+        condition: gt(variables.coverageFileCount, 0)
+      - bash: .azure-pipelines/scripts/publish-codecov.sh "$(outputPath)"
+        displayName: Publish to codecov.io
+        condition: gt(variables.coverageFileCount, 0)
+        continueOnError: true
diff --git a/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/templates/matrix.yml b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/templates/matrix.yml
new file mode 100644
index 00000000..4e9555dd
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/templates/matrix.yml
@@ -0,0 +1,55 @@
+# This template uses the provided targets and optional groups to generate a matrix which is then passed to the test template.
+# If this matrix template does not provide the required functionality, consider using the test template directly instead.
+
+parameters:
+  # A required list of dictionaries, one per test target.
+  # Each item in the list must contain a "test" or "name" key.
+  # Both may be provided. If one is omitted, the other will be used.
+  - name: targets
+    type: object
+
+  # An optional list of values which will be used to multiply the targets list into a matrix.
+  # Values can be strings or numbers.
+  - name: groups
+    type: object
+    default: []
+
+  # An optional format string used to generate the job name.
+  # - {0} is the name of an item in the targets list.
+  - name: nameFormat
+    type: string
+    default: "{0}"
+
+  # An optional format string used to generate the test name.
+  # - {0} is the name of an item in the targets list.
+  - name: testFormat
+    type: string
+    default: "{0}"
+
+  # An optional format string used to add the group to the job name.
+  # {0} is the formatted name of an item in the targets list.
+  # {{1}} is the group -- be sure to include the double "{{" and "}}".
+  - name: nameGroupFormat
+    type: string
+    default: "{0} - {{1}}"
+
+  # An optional format string used to add the group to the test name.
+  # {0} is the formatted test of an item in the targets list.
+  # {{1}} is the group -- be sure to include the double "{{" and "}}".
+  - name: testGroupFormat
+    type: string
+    default: "{0}/{{1}}"
+
+jobs:
+  - template: test.yml
+    parameters:
+      jobs:
+        - ${{ if eq(length(parameters.groups), 0) }}:
+            - ${{ each target in parameters.targets }}:
+                - name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
+                  test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}
+        - ${{ if not(eq(length(parameters.groups), 0)) }}:
+            - ${{ each group in parameters.groups }}:
+                - ${{ each target in parameters.targets }}:
+                    - name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
+                      test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}
diff --git a/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/templates/test.yml b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/templates/test.yml
new file mode 100644
index 00000000..5250ed80
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/community/network/.azure-pipelines/templates/test.yml
@@ -0,0 +1,45 @@
+# This template uses the provided list of jobs to create one or more test jobs.
+# It can be used directly if needed, or through the matrix template.
+
+parameters:
+  # A required list of dictionaries, one per test job.
+  # Each item in the list must contain a "job" and "name" key.
+  - name: jobs
+    type: object
+
+jobs:
+  - ${{ each job in parameters.jobs }}:
+      - job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
+        displayName: ${{ job.name }}
+        container: default
+        workspace:
+          clean: all
+        steps:
+          - checkout: self
+            fetchDepth: $(fetchDepth)
+            path: $(checkoutPath)
+          - bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
+            displayName: Run Tests
+          - bash: .azure-pipelines/scripts/process-results.sh
+            condition: succeededOrFailed()
+            displayName: Process Results
+          - bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
+            condition: eq(variables.haveCoverageData, 'true')
+            displayName: Aggregate Coverage Data
+          - task: PublishTestResults@2
+            condition: eq(variables.haveTestResults, 'true')
+            inputs:
+              testResultsFiles: "$(outputPath)/junit/*.xml"
+            displayName: Publish Test Results
+          - task: PublishPipelineArtifact@1
+            condition: eq(variables.haveBotResults, 'true')
+            displayName: Publish Bot Results
+            inputs:
+              targetPath: "$(outputPath)/bot/"
+              artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
+          - task: PublishPipelineArtifact@1
+            condition: eq(variables.haveCoverageData, 'true')
+            displayName: Publish Coverage Data
+            inputs:
+              targetPath: "$(Agent.TempDirectory)/coverage/"
+              artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
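Note: every stage in azure-pipelines.yml above pins the test group directly in testFormat (for example devel/linux/{0}/1), so the optional "groups" parameter documented in templates/matrix.yml is never exercised in this change. Purely as a sketch of that mechanism — the stage name, group numbers, and target below are illustrative assumptions, not part of this diff — a stage that multiplies each target across several groups could look like:

  - stage: Docker_devel_grouped        # hypothetical stage name, not in this change
    displayName: Docker devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: devel/linux/{0}  # the group is appended by testGroupFormat
          groups:                      # one job per target per group
            - 1
            - 2
            - 3
          targets:
            - name: CentOS 8
              test: centos8

With the default testGroupFormat of "{0}/{{1}}", this would yield jobs running devel/linux/centos8/1, devel/linux/centos8/2, and devel/linux/centos8/3, named "CentOS 8 - 1" and so on via the default nameGroupFormat.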