Diffstat (limited to 'ansible_collections/community/postgresql/.azure-pipelines')
12 files changed, 607 insertions, 0 deletions
diff --git a/ansible_collections/community/postgresql/.azure-pipelines/README.md b/ansible_collections/community/postgresql/.azure-pipelines/README.md
new file mode 100644
index 000000000..385e70bac
--- /dev/null
+++ b/ansible_collections/community/postgresql/.azure-pipelines/README.md
@@ -0,0 +1,3 @@
+## Azure Pipelines Configuration
+
+Please see the [Documentation](https://github.com/ansible/community/wiki/Testing:-Azure-Pipelines) for more information.
diff --git a/ansible_collections/community/postgresql/.azure-pipelines/azure-pipelines.yml b/ansible_collections/community/postgresql/.azure-pipelines/azure-pipelines.yml
new file mode 100644
index 000000000..4a34c9edf
--- /dev/null
+++ b/ansible_collections/community/postgresql/.azure-pipelines/azure-pipelines.yml
@@ -0,0 +1,260 @@
+trigger:
+  batch: true
+  branches:
+    include:
+      - main
+      - stable-*
+
+pr:
+  autoCancel: true
+  branches:
+    include:
+      - main
+      - stable-*
+
+schedules:
+  - cron: 0 9 * * *
+    displayName: Nightly
+    always: true
+    branches:
+      include:
+        - main
+        - stable-*
+
+variables:
+  - name: checkoutPath
+    value: ansible_collections/community/postgresql
+  - name: coverageBranches
+    value: main
+  - name: pipelinesCoverage
+    value: coverage
+  - name: entryPoint
+    value: tests/utils/shippable/shippable.sh
+  - name: fetchDepth
+    value: 0
+
+resources:
+  containers:
+    - container: default
+      image: quay.io/ansible/azure-pipelines-test-container:3.0.0
+
+pool: Standard
+
+stages:
+## Sanity & units
+  - stage: Ansible_devel
+    displayName: Sanity & Units devel
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          targets:
+            - name: Sanity
+              test: 'devel/sanity/1'
+            - name: Sanity Extra # Only on devel
+              test: 'devel/sanity/extra'
+            - name: Units
+              test: 'devel/units/1'
+
+  - stage: Ansible_2_15
+    displayName: Sanity & Units 2.15
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          targets:
+            - name: Sanity
+              test: '2.15/sanity/1'
+            - name: Units
+              test: '2.15/units/1'
+
+  - stage: Ansible_2_14
+    displayName: Sanity & Units 2.14
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          targets:
+            - name: Sanity
+              test: '2.14/sanity/1'
+            - name: Units
+              test: '2.14/units/1'
+
+  - stage: Ansible_2_13
+    displayName: Sanity & Units 2.13
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          targets:
+            - name: Sanity
+              test: '2.13/sanity/1'
+            - name: Units
+              test: '2.13/units/1'
+
+  - stage: Ansible_2_12
+    displayName: Sanity & Units 2.12
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          targets:
+            - name: Sanity
+              test: '2.12/sanity/1'
+            - name: Units
+              test: '2.12/units/1'
+
+## Docker
+  - stage: Docker_devel
+    displayName: Docker devel
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: devel/linux/{0}/1
+          targets:
+            - name: CentOS 7
+              test: centos7
+            - name: Fedora 37
+              test: fedora37
+            - name: Ubuntu 20.04
+              test: ubuntu2004
+
+  - stage: Docker_2_15
+    displayName: Docker 2.15
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: 2.15/linux/{0}/1
+          targets:
+            - name: CentOS 7
+              test: centos7
+            - name: Fedora 37
+              test: fedora37
+            - name: Ubuntu 20.04
+              test: ubuntu2004
+
+  - stage: Docker_2_14
+    displayName: Docker 2.14
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: 2.14/linux/{0}/1
+          targets:
+            - name: CentOS 7
+              test: centos7
+            - name: Fedora 36
+              test: fedora36
+            - name: Ubuntu 20.04
+              test: ubuntu2004
+
+  - stage: Docker_2_13
+    displayName: Docker 2.13
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: 2.13/linux/{0}/1
+          targets:
+            - name: CentOS 7
+              test: centos7
+            - name: Fedora 35
+              test: fedora35
+            - name: Ubuntu 20.04
+              test: ubuntu2004
+
+  - stage: Docker_2_12
+    displayName: Docker 2.12
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: 2.12/linux/{0}/1
+          targets:
+            - name: CentOS 7
+              test: centos7
+            - name: Fedora 34
+              test: fedora34
+            - name: Ubuntu 20.04
+              test: ubuntu2004
+
+## Remote
+  - stage: Remote_devel
+    displayName: Remote devel
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: devel/{0}/1
+          targets:
+            - name: RHEL 8.7
+              test: rhel/8.7
+
+  - stage: Remote_2_15
+    displayName: Remote 2.15
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: 2.15/{0}/1
+          targets:
+            - name: RHEL 8.7
+              test: rhel/8.7
+
+  - stage: Remote_2_14
+    displayName: Remote 2.14
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: 2.14/{0}/1
+          targets:
+            - name: RHEL 8.6
+              test: rhel/8.6
+
+  - stage: Remote_2_13
+    displayName: Remote 2.13
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: 2.13/{0}/1
+          targets:
+            - name: RHEL 8.5
+              test: rhel/8.5
+
+  - stage: Remote_2_12
+    displayName: Remote 2.12
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: 2.12/{0}/1
+          targets:
+            - name: RHEL 8.4
+              test: rhel/8.4
+
+## Finally
+
+  - stage: Summary
+    condition: succeededOrFailed()
+    dependsOn:
+      - Ansible_devel
+      - Ansible_2_15
+      - Ansible_2_14
+      - Ansible_2_13
+      - Ansible_2_12
+      - Docker_devel
+      - Docker_2_15
+      - Docker_2_14
+      - Docker_2_13
+      - Docker_2_12
+      - Remote_devel
+      - Remote_2_15
+      - Remote_2_14
+      - Remote_2_13
+      - Remote_2_12
+    jobs:
+      - template: templates/coverage.yml
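Each stage above only supplies a `testFormat` string and a list of targets; the test identifier that is eventually handed to the `tests/utils/shippable/shippable.sh` entry point comes from formatting the target into that string (see `templates/matrix.yml` further down). A minimal Python sketch of that expansion, using the "Docker 2.15" stage as the example; the printed strings are illustrative only, not pipeline output:

```python
# Sketch: how matrix.yml expands a stage's targets into test identifiers.
# The targets mirror the "Docker 2.15" stage; the expansion approximates the
# Azure Pipelines format() expression in templates/matrix.yml.
test_format = "2.15/linux/{0}/1"
targets = [
    {"name": "CentOS 7", "test": "centos7"},
    {"name": "Fedora 37", "test": "fedora37"},
    {"name": "Ubuntu 20.04", "test": "ubuntu2004"},
]

for target in targets:
    test = test_format.format(target.get("test") or target.get("name"))
    print(f'{target["name"]}: run-tests.sh receives test "{test}"')
    # e.g. CentOS 7: run-tests.sh receives test "2.15/linux/centos7/1"
```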
diff --git a/ansible_collections/community/postgresql/.azure-pipelines/scripts/aggregate-coverage.sh b/ansible_collections/community/postgresql/.azure-pipelines/scripts/aggregate-coverage.sh
new file mode 100755
index 000000000..f3113dd0a
--- /dev/null
+++ b/ansible_collections/community/postgresql/.azure-pipelines/scripts/aggregate-coverage.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+# Aggregate code coverage results for later processing.
+
+set -o pipefail -eu
+
+agent_temp_directory="$1"
+
+PATH="${PWD}/bin:${PATH}"
+
+mkdir "${agent_temp_directory}/coverage/"
+
+options=(--venv --venv-system-site-packages --color -v)
+
+ansible-test coverage combine --export "${agent_temp_directory}/coverage/" "${options[@]}"
+
+if ansible-test coverage analyze targets generate --help >/dev/null 2>&1; then
+    # Only analyze coverage if the installed version of ansible-test supports it.
+    # Doing so allows this script to work unmodified for multiple Ansible versions.
+    ansible-test coverage analyze targets generate "${agent_temp_directory}/coverage/coverage-analyze-targets.json" "${options[@]}"
+fi
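The guard in aggregate-coverage.sh probes `ansible-test coverage analyze targets generate --help` and only runs the command when the probe succeeds, so the same script works across ansible-test versions. A rough Python equivalent of that probe-then-run pattern; the output filename here is the only assumption:

```python
import subprocess

def supports(command):
    """Return True if the command exits successfully when asked for --help."""
    try:
        subprocess.run(command + ["--help"], check=True,
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        return True
    except (OSError, subprocess.CalledProcessError):
        return False

analyze = ["ansible-test", "coverage", "analyze", "targets", "generate"]

if supports(analyze):
    # Only analyze coverage if the installed ansible-test supports it.
    subprocess.run(analyze + ["coverage-analyze-targets.json"], check=True)
```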
diff --git a/ansible_collections/community/postgresql/.azure-pipelines/scripts/combine-coverage.py b/ansible_collections/community/postgresql/.azure-pipelines/scripts/combine-coverage.py
new file mode 100755
index 000000000..506ade646
--- /dev/null
+++ b/ansible_collections/community/postgresql/.azure-pipelines/scripts/combine-coverage.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+"""
+Combine coverage data from multiple jobs, keeping the data only from the most recent attempt from each job.
+Coverage artifacts must be named using the format: "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}"
+The recommended coverage artifact name format is: Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)
+Keep in mind that Azure Pipelines does not enforce unique job display names (only names).
+It is up to pipeline authors to avoid name collisions when deviating from the recommended format.
+"""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import re
+import shutil
+import sys
+
+
+def main():
+    """Main program entry point."""
+    source_directory = sys.argv[1]
+
+    if '/ansible_collections/' in os.getcwd():
+        output_path = "tests/output"
+    else:
+        output_path = "test/results"
+
+    destination_directory = os.path.join(output_path, 'coverage')
+
+    if not os.path.exists(destination_directory):
+        os.makedirs(destination_directory)
+
+    jobs = {}
+    count = 0
+
+    for name in os.listdir(source_directory):
+        match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
+        label = match.group('label')
+        attempt = int(match.group('attempt'))
+        jobs[label] = max(attempt, jobs.get(label, 0))
+
+    for label, attempt in jobs.items():
+        name = 'Coverage {attempt} {label}'.format(label=label, attempt=attempt)
+        source = os.path.join(source_directory, name)
+        source_files = os.listdir(source)
+
+        for source_file in source_files:
+            source_path = os.path.join(source, source_file)
+            destination_path = os.path.join(destination_directory, source_file + '.' + label)
+            print('"%s" -> "%s"' % (source_path, destination_path))
+            shutil.copyfile(source_path, destination_path)
+            count += 1
+
+    print('Coverage file count: %d' % count)
+    print('##vso[task.setVariable variable=coverageFileCount]%d' % count)
+    print('##vso[task.setVariable variable=outputPath]%s' % output_path)
+
+
+if __name__ == '__main__':
+    main()
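combine-coverage.py keys each downloaded artifact on its label and keeps only the highest job attempt, so a re-run job does not contribute stale data. A small, self-contained illustration of that selection step on made-up artifact names; the names are hypothetical, the regex is the one from the script:

```python
import re

# Hypothetical artifact directory names, as produced by the test template:
# "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
artifacts = [
    "Coverage 1 Docker devel CentOS 7",
    "Coverage 2 Docker devel CentOS 7",   # re-run of the same job
    "Coverage 1 Remote devel RHEL 8.7",
]

jobs = {}
for name in artifacts:
    match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
    label = match.group('label')
    attempt = int(match.group('attempt'))
    jobs[label] = max(attempt, jobs.get(label, 0))

print(jobs)
# {'Docker devel CentOS 7': 2, 'Remote devel RHEL 8.7': 1}
# Only "Coverage 2 Docker devel CentOS 7" and "Coverage 1 Remote devel RHEL 8.7"
# would be copied into the coverage output directory.
```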
diff --git a/ansible_collections/community/postgresql/.azure-pipelines/scripts/process-results.sh b/ansible_collections/community/postgresql/.azure-pipelines/scripts/process-results.sh
new file mode 100755
index 000000000..f3f1d1bae
--- /dev/null
+++ b/ansible_collections/community/postgresql/.azure-pipelines/scripts/process-results.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+# Check the test results and set variables for use in later steps.
+
+set -o pipefail -eu
+
+if [[ "$PWD" =~ /ansible_collections/ ]]; then
+    output_path="tests/output"
+else
+    output_path="test/results"
+fi
+
+echo "##vso[task.setVariable variable=outputPath]${output_path}"
+
+if compgen -G "${output_path}"'/junit/*.xml' > /dev/null; then
+    echo "##vso[task.setVariable variable=haveTestResults]true"
+fi
+
+if compgen -G "${output_path}"'/bot/ansible-test-*' > /dev/null; then
+    echo "##vso[task.setVariable variable=haveBotResults]true"
+fi
+
+if compgen -G "${output_path}"'/coverage/*' > /dev/null; then
+    echo "##vso[task.setVariable variable=haveCoverageData]true"
+fi
diff --git a/ansible_collections/community/postgresql/.azure-pipelines/scripts/publish-codecov.sh b/ansible_collections/community/postgresql/.azure-pipelines/scripts/publish-codecov.sh
new file mode 100755
index 000000000..6d184f0b8
--- /dev/null
+++ b/ansible_collections/community/postgresql/.azure-pipelines/scripts/publish-codecov.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+# Upload code coverage reports to codecov.io.
+# Multiple coverage files from multiple languages are accepted and aggregated after upload.
+# Python coverage, as well as PowerShell and Python stubs can all be uploaded.
+
+set -o pipefail -eu
+
+output_path="$1"
+
+curl --silent --show-error https://ansible-ci-files.s3.us-east-1.amazonaws.com/codecov/codecov.sh > codecov.sh
+
+for file in "${output_path}"/reports/coverage*.xml; do
+    name="${file}"
+    name="${name##*/}" # remove path
+    name="${name##coverage=}" # remove 'coverage=' prefix if present
+    name="${name%.xml}" # remove '.xml' suffix
+
+    bash codecov.sh \
+        -f "${file}" \
+        -n "${name}" \
+        -X coveragepy \
+        -X gcov \
+        -X fix \
+        -X search \
+        -X xcode \
+        || echo "Failed to upload code coverage report to codecov.io: ${file}"
+done
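publish-codecov.sh derives the upload name for each report from its filename by stripping the directory, an optional `coverage=` prefix, and the `.xml` suffix. The same transformation in Python, applied to an assumed example path:

```python
import os

def upload_name(path):
    """Mirror the name mangling done in publish-codecov.sh."""
    name = os.path.basename(path)          # remove path
    if name.startswith("coverage="):       # remove 'coverage=' prefix if present
        name = name[len("coverage="):]
    if name.endswith(".xml"):              # remove '.xml' suffix
        name = name[:-len(".xml")]
    return name

# Hypothetical report produced by `ansible-test coverage xml`:
print(upload_name("tests/output/reports/coverage=units.xml"))  # -> units
```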
diff --git a/ansible_collections/community/postgresql/.azure-pipelines/scripts/report-coverage.sh b/ansible_collections/community/postgresql/.azure-pipelines/scripts/report-coverage.sh
new file mode 100755
index 000000000..1bd91bdc9
--- /dev/null
+++ b/ansible_collections/community/postgresql/.azure-pipelines/scripts/report-coverage.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+# Generate code coverage reports for uploading to Azure Pipelines and codecov.io.
+
+set -o pipefail -eu
+
+PATH="${PWD}/bin:${PATH}"
+
+if ! ansible-test --help >/dev/null 2>&1; then
+    # Install the devel version of ansible-test for generating code coverage reports.
+    # This is only used by Ansible Collections, which are typically tested against multiple Ansible versions (in separate jobs).
+    # Since a version of ansible-test is required that can work the output from multiple older releases, the devel version is used.
+    pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
+fi
+
+ansible-test coverage xml --stub --venv --venv-system-site-packages --color -v
diff --git a/ansible_collections/community/postgresql/.azure-pipelines/scripts/run-tests.sh b/ansible_collections/community/postgresql/.azure-pipelines/scripts/run-tests.sh
new file mode 100755
index 000000000..a947fdf01
--- /dev/null
+++ b/ansible_collections/community/postgresql/.azure-pipelines/scripts/run-tests.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+# Configure the test environment and run the tests.
+
+set -o pipefail -eu
+
+entry_point="$1"
+test="$2"
+read -r -a coverage_branches <<< "$3" # space separated list of branches to run code coverage on for scheduled builds
+
+export COMMIT_MESSAGE
+export COMPLETE
+export COVERAGE
+export IS_PULL_REQUEST
+
+if [ "${SYSTEM_PULLREQUEST_TARGETBRANCH:-}" ]; then
+    IS_PULL_REQUEST=true
+    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD^2)
+else
+    IS_PULL_REQUEST=
+    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD)
+fi
+
+COMPLETE=
+COVERAGE=
+
+if [ "${BUILD_REASON}" = "Schedule" ]; then
+    COMPLETE=yes
+
+    if printf '%s\n' "${coverage_branches[@]}" | grep -q "^${BUILD_SOURCEBRANCHNAME}$"; then
+        COVERAGE=yes
+    fi
+fi
+
+"${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"
diff --git a/ansible_collections/community/postgresql/.azure-pipelines/scripts/time-command.py b/ansible_collections/community/postgresql/.azure-pipelines/scripts/time-command.py
new file mode 100755
index 000000000..5e8eb8d4c
--- /dev/null
+++ b/ansible_collections/community/postgresql/.azure-pipelines/scripts/time-command.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+"""Prepends a relative timestamp to each input line from stdin and writes it to stdout."""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import sys
+import time
+
+
+def main():
+    """Main program entry point."""
+    start = time.time()
+
+    sys.stdin.reconfigure(errors='surrogateescape')
+    sys.stdout.reconfigure(errors='surrogateescape')
+
+    for line in sys.stdin:
+        seconds = time.time() - start
+        sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
+        sys.stdout.flush()
+
+
+if __name__ == '__main__':
+    main()
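run-tests.sh pipes the entry point's combined output through time-command.py, which prefixes each line with minutes:seconds elapsed since the script started. A quick way to see the same formatting outside the pipeline; the sample offsets and messages are made up:

```python
# Sketch of the timestamp prefix produced by time-command.py for a few sample offsets.
for seconds, line in [(3.2, "Running sanity test..."), (75.9, "Sanity check passed.")]:
    print('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
# 00:03 Running sanity test...
# 01:15 Sanity check passed.
```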
diff --git a/ansible_collections/community/postgresql/.azure-pipelines/templates/coverage.yml b/ansible_collections/community/postgresql/.azure-pipelines/templates/coverage.yml
new file mode 100644
index 000000000..1864e4441
--- /dev/null
+++ b/ansible_collections/community/postgresql/.azure-pipelines/templates/coverage.yml
@@ -0,0 +1,39 @@
+# This template adds a job for processing code coverage data.
+# It will upload results to Azure Pipelines and codecov.io.
+# Use it from a job stage that completes after all other jobs have completed.
+# This can be done by placing it in a separate summary stage that runs after the test stage(s) have completed.
+
+jobs:
+  - job: Coverage
+    displayName: Code Coverage
+    container: default
+    workspace:
+      clean: all
+    steps:
+      - checkout: self
+        fetchDepth: $(fetchDepth)
+        path: $(checkoutPath)
+      - task: DownloadPipelineArtifact@2
+        displayName: Download Coverage Data
+        inputs:
+          path: coverage/
+          patterns: "Coverage */*=coverage.combined"
+      - bash: .azure-pipelines/scripts/combine-coverage.py coverage/
+        displayName: Combine Coverage Data
+      - bash: .azure-pipelines/scripts/report-coverage.sh
+        displayName: Generate Coverage Report
+        condition: gt(variables.coverageFileCount, 0)
+      - task: PublishCodeCoverageResults@1
+        inputs:
+          codeCoverageTool: Cobertura
+          # Azure Pipelines only accepts a single coverage data file.
+          # That means only Python or PowerShell coverage can be uploaded, but not both.
+          # Set the "pipelinesCoverage" variable to determine which type is uploaded.
+          # Use "coverage" for Python and "coverage-powershell" for PowerShell.
+          summaryFileLocation: "$(outputPath)/reports/$(pipelinesCoverage).xml"
+        displayName: Publish to Azure Pipelines
+        condition: gt(variables.coverageFileCount, 0)
+      - bash: .azure-pipelines/scripts/publish-codecov.sh "$(outputPath)"
+        displayName: Publish to codecov.io
+        condition: gt(variables.coverageFileCount, 0)
+        continueOnError: true
diff --git a/ansible_collections/community/postgresql/.azure-pipelines/templates/matrix.yml b/ansible_collections/community/postgresql/.azure-pipelines/templates/matrix.yml
new file mode 100644
index 000000000..4e9555dd3
--- /dev/null
+++ b/ansible_collections/community/postgresql/.azure-pipelines/templates/matrix.yml
@@ -0,0 +1,55 @@
+# This template uses the provided targets and optional groups to generate a matrix which is then passed to the test template.
+# If this matrix template does not provide the required functionality, consider using the test template directly instead.
+
+parameters:
+  # A required list of dictionaries, one per test target.
+  # Each item in the list must contain a "test" or "name" key.
+  # Both may be provided. If one is omitted, the other will be used.
+  - name: targets
+    type: object
+
+  # An optional list of values which will be used to multiply the targets list into a matrix.
+  # Values can be strings or numbers.
+  - name: groups
+    type: object
+    default: []
+
+  # An optional format string used to generate the job name.
+  # - {0} is the name of an item in the targets list.
+  - name: nameFormat
+    type: string
+    default: "{0}"
+
+  # An optional format string used to generate the test name.
+  # - {0} is the name of an item in the targets list.
+  - name: testFormat
+    type: string
+    default: "{0}"
+
+  # An optional format string used to add the group to the job name.
+  # {0} is the formatted name of an item in the targets list.
+  # {{1}} is the group -- be sure to include the double "{{" and "}}".
+  - name: nameGroupFormat
+    type: string
+    default: "{0} - {{1}}"
+
+  # An optional format string used to add the group to the test name.
+  # {0} is the formatted test of an item in the targets list.
+  # {{1}} is the group -- be sure to include the double "{{" and "}}".
+  - name: testGroupFormat
+    type: string
+    default: "{0}/{{1}}"
+
+jobs:
+  - template: test.yml
+    parameters:
+      jobs:
+        - ${{ if eq(length(parameters.groups), 0) }}:
+          - ${{ each target in parameters.targets }}:
+            - name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
+              test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}
+        - ${{ if not(eq(length(parameters.groups), 0)) }}:
+          - ${{ each group in parameters.groups }}:
+            - ${{ each target in parameters.targets }}:
+              - name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
+                test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}
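matrix.yml multiplies targets by groups when groups are supplied; the doubled braces in the group-format defaults exist because the template applies format() twice, once to splice in the base format and once to fill in the target and group. A Python approximation of that two-step expansion; the target, group, and testFormat values here are hypothetical, since the postgresql pipeline above calls the template without groups:

```python
name_format = "{0}"                   # defaults from matrix.yml
test_format = "linux/{0}"             # hypothetical testFormat
name_group_format = "{0} - {{1}}"
test_group_format = "{0}/{{1}}"

targets = [{"name": "CentOS 7", "test": "centos7"}]   # hypothetical target
groups = [1, 2]                                        # hypothetical groups

for group in groups:
    for target in targets:
        # First format() expands the base format into the group format,
        # second format() fills in the target and the group.
        name = name_group_format.format(name_format).format(target["name"], group)
        test = test_group_format.format(test_format).format(target["test"], group)
        print(name, "->", test)
# CentOS 7 - 1 -> linux/centos7/1
# CentOS 7 - 2 -> linux/centos7/2
```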
diff --git a/ansible_collections/community/postgresql/.azure-pipelines/templates/test.yml b/ansible_collections/community/postgresql/.azure-pipelines/templates/test.yml
new file mode 100644
index 000000000..5250ed802
--- /dev/null
+++ b/ansible_collections/community/postgresql/.azure-pipelines/templates/test.yml
@@ -0,0 +1,45 @@
+# This template uses the provided list of jobs to create test one or more test jobs.
+# It can be used directly if needed, or through the matrix template.
+
+parameters:
+  # A required list of dictionaries, one per test job.
+  # Each item in the list must contain a "job" and "name" key.
+  - name: jobs
+    type: object
+
+jobs:
+  - ${{ each job in parameters.jobs }}:
+    - job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
+      displayName: ${{ job.name }}
+      container: default
+      workspace:
+        clean: all
+      steps:
+        - checkout: self
+          fetchDepth: $(fetchDepth)
+          path: $(checkoutPath)
+        - bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
+          displayName: Run Tests
+        - bash: .azure-pipelines/scripts/process-results.sh
+          condition: succeededOrFailed()
+          displayName: Process Results
+        - bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
+          condition: eq(variables.haveCoverageData, 'true')
+          displayName: Aggregate Coverage Data
+        - task: PublishTestResults@2
+          condition: eq(variables.haveTestResults, 'true')
+          inputs:
+            testResultsFiles: "$(outputPath)/junit/*.xml"
+          displayName: Publish Test Results
+        - task: PublishPipelineArtifact@1
+          condition: eq(variables.haveBotResults, 'true')
+          displayName: Publish Bot Results
+          inputs:
+            targetPath: "$(outputPath)/bot/"
+            artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
+        - task: PublishPipelineArtifact@1
+          condition: eq(variables.haveCoverageData, 'true')
+          displayName: Publish Coverage Data
+          inputs:
+            targetPath: "$(Agent.TempDirectory)/coverage/"
+            artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
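test.yml turns each test identifier into a valid Azure Pipelines job id by replacing '/', '.', and '-' with underscores, while the human-readable displayName keeps the original target name. The same sanitisation in Python, applied to one of the test strings defined in azure-pipelines.yml above:

```python
def job_id(test):
    """Mirror the nested replace() calls in templates/test.yml."""
    for char in "/.-":
        test = test.replace(char, "_")
    return "test_" + test

print(job_id("2.15/linux/centos7/1"))  # -> test_2_15_linux_centos7_1
```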