author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-13 12:04:41 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-13 12:04:41 +0000
commit    975f66f2eebe9dadba04f275774d4ab83f74cf25 (patch)
tree      89bd26a93aaae6a25749145b7e4bca4a1e75b2be /ansible_collections/community/okd/ci
parent    Initial commit. (diff)
Adding upstream version 7.7.0+dfsg. (upstream/7.7.0+dfsg)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ansible_collections/community/okd/ci')
-rw-r--r--   ansible_collections/community/okd/ci/Dockerfile                   43
-rwxr-xr-x   ansible_collections/community/okd/ci/doc_fragment_modules.py      97
-rwxr-xr-x   ansible_collections/community/okd/ci/downstream.sh               298
-rwxr-xr-x   ansible_collections/community/okd/ci/downstream_fragments.py      32
-rwxr-xr-x   ansible_collections/community/okd/ci/incluster_integration.sh     92
5 files changed, 562 insertions, 0 deletions
diff --git a/ansible_collections/community/okd/ci/Dockerfile b/ansible_collections/community/okd/ci/Dockerfile
new file mode 100644
index 000000000..1a509190d
--- /dev/null
+++ b/ansible_collections/community/okd/ci/Dockerfile
@@ -0,0 +1,43 @@
+FROM registry.access.redhat.com/ubi8/ubi
+
+ENV OPERATOR=/usr/local/bin/ansible-operator \
+ USER_UID=1001 \
+    USER_NAME=ansible-operator \
+ HOME=/opt/ansible \
+ ANSIBLE_LOCAL_TMP=/opt/ansible/tmp \
+ DOWNSTREAM_BUILD_PYTHON=python3.9
+
+RUN yum install -y \
+ glibc-langpack-en \
+ git \
+ make \
+ python39 \
+ python39-devel \
+ python39-pip \
+ python39-setuptools \
+ gcc \
+ openldap-devel \
+ && pip3 install --no-cache-dir --upgrade setuptools pip \
+ && pip3 install --no-cache-dir \
+ kubernetes \
+ ansible==2.9.* \
+ "molecule<3.3.0" \
+ && yum clean all \
+ && rm -rf $HOME/.cache \
+ && curl -L https://github.com/openshift/okd/releases/download/4.5.0-0.okd-2020-08-12-020541/openshift-client-linux-4.5.0-0.okd-2020-08-12-020541.tar.gz | tar -xz -C /usr/local/bin
+ # TODO: Is there a better way to install this client in ubi8?
+
+COPY . /opt/ansible
+
+WORKDIR /opt/ansible
+
+RUN echo "${USER_NAME}:x:${USER_UID}:0:${USER_NAME} user:${HOME}:/sbin/nologin" >> /etc/passwd \
+ && mkdir -p "${HOME}/.ansible/tmp" \
+ && chown -R "${USER_UID}:0" "${HOME}" \
+ && chmod -R ug+rwX "${HOME}" \
+ && mkdir /go \
+ && chown -R "${USER_UID}:0" /go \
+ && chmod -R ug+rwX /go
+
+
+USER ${USER_UID}
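+
+# Example local build (the tag below is only an assumed name, chosen to match the
+# "component" used by ci/incluster_integration.sh):
+#   docker build -f ci/Dockerfile -t molecule-test-runner .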
diff --git a/ansible_collections/community/okd/ci/doc_fragment_modules.py b/ansible_collections/community/okd/ci/doc_fragment_modules.py
new file mode 100755
index 000000000..b92abe5bf
--- /dev/null
+++ b/ansible_collections/community/okd/ci/doc_fragment_modules.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import ast
+from pathlib import PosixPath
+import yaml
+import argparse
+import os
+
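+# Usage (assumed to be run from the collection root, the default for -c):
+#   ci/doc_fragment_modules.py [-c COLLECTION_PATH]
+# Prints, one per line, the modules under plugins/modules whose documentation
+# extends a kubernetes.core doc fragment and therefore needs downstream
+# rendering (see ci/downstream.sh).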
+
+def read_docstring(filename):
+    """
+    Search the given module file for assignments to the documentation variables
+    (DOCUMENTATION, EXAMPLES, RETURN, ANSIBLE_METADATA) and return the parsed
+    DOCUMENTATION YAML as a dict, or None if it is missing or cannot be parsed.
+    """
+
+ data = {
+ 'doc': None,
+ 'plainexamples': None,
+ 'returndocs': None,
+ 'metadata': None, # NOTE: not used anymore, kept for compat
+ 'seealso': None,
+ }
+
+ string_to_vars = {
+ 'DOCUMENTATION': 'doc',
+ 'EXAMPLES': 'plainexamples',
+ 'RETURN': 'returndocs',
+ 'ANSIBLE_METADATA': 'metadata', # NOTE: now unused, but kept for backwards compat
+ }
+
+ try:
+ with open(filename, 'rb') as b_module_data:
+ M = ast.parse(b_module_data.read())
+
+ for child in M.body:
+ if isinstance(child, ast.Assign):
+ for t in child.targets:
+ try:
+ theid = t.id
+ except AttributeError:
+                            # skip assignment targets without an id (e.g. tuple
+                            # unpacking); this can happen in perfectly normal code
+ # sys.stderr.write("Failed to assign id for %s on %s, skipping\n" % (t, filename))
+ continue
+
+ if theid in string_to_vars:
+ varkey = string_to_vars[theid]
+ if isinstance(child.value, ast.Dict):
+ data[varkey] = ast.literal_eval(child.value)
+ else:
+ if theid != 'EXAMPLES':
+                                    # if the value is not already a dict, it should be a YAML string
+ data[varkey] = child.value.s
+
+ # sys.stderr.write('assigned: %s\n' % varkey)
+
+ except Exception:
+ # sys.stderr.write("unable to parse %s" % filename)
+ return
+
+ return yaml.safe_load(data["doc"]) if data["doc"] is not None else None
+
+
+def is_extending_collection(result, col_fqcn):
+ if result:
+ for x in result.get("extends_documentation_fragment", []):
+ if x.startswith(col_fqcn):
+ return True
+ return False
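+
+# Example (illustrative): a module whose parsed DOCUMENTATION contains
+#   extends_documentation_fragment: [kubernetes.core.k8s_auth_options]
+# makes is_extending_collection(result, "kubernetes.core.") return True.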
+
+
+def main():
+
+ parser = argparse.ArgumentParser(
+ description="list modules with inherited doc fragments from kubernetes.core that need rendering to deal with Galaxy/AH lack of functionality."
+ )
+ parser.add_argument(
+ "-c", "--collection-path", type=str, default=os.getcwd(), help="path to the collection"
+ )
+
+ args = parser.parse_args()
+
+ path = PosixPath(args.collection_path) / PosixPath("plugins/modules")
+ output = []
+ for d in path.iterdir():
+ if d.is_file():
+ result = read_docstring(str(d))
+ if is_extending_collection(result, "kubernetes.core."):
+ output.append(d.stem.replace(".py", ""))
+ print("\n".join(output))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/community/okd/ci/downstream.sh b/ansible_collections/community/okd/ci/downstream.sh
new file mode 100755
index 000000000..001959c7e
--- /dev/null
+++ b/ansible_collections/community/okd/ci/downstream.sh
@@ -0,0 +1,298 @@
+#!/bin/bash -eu
+
+# Script to dual-home the upstream and downstream Collection in a single repo
+#
+# This script will build or test a downstream collection, removing any
+# upstream components that will not ship in the downstream release
+#
+# NOTES:
+# - All functions are prefixed with f_ so it's obvious where they come
+# from when in use throughout the script
+
+DOWNSTREAM_VERSION="2.3.0"
+# When set to a non-empty value, keep the temporary downstream build tree for inspection
+KEEP_DOWNSTREAM_TMPDIR="${KEEP_DOWNSTREAM_TMPDIR:-}"
+INSTALL_DOWNSTREAM_COLLECTION_PATH="${INSTALL_DOWNSTREAM_COLLECTION_PATH:-}"
+_build_dir=""
+
+f_log_info()
+{
+ printf "%s:LOG:INFO: %s\n" "${0}" "${1}"
+}
+
+f_show_help()
+{
+ printf "Usage: downstream.sh [OPTION]\n"
+ printf "\t-s\t\tCreate a temporary downstream release and perform sanity tests.\n"
+    printf "\t-u\t\tCreate a temporary downstream release and perform unit tests.\n"
+ printf "\t-i\t\tCreate a temporary downstream release and perform integration tests.\n"
+ printf "\t-m\t\tCreate a temporary downstream release and perform molecule tests.\n"
+ printf "\t-b\t\tCreate a downstream release and stage for release.\n"
+ printf "\t-r\t\tCreate a downstream release and publish release.\n"
+}
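+
+# Example invocations (assumed, run from the collection root):
+#   ./ci/downstream.sh -b                            # build the downstream tarball into ./
+#   KEEP_DOWNSTREAM_TMPDIR=1 ./ci/downstream.sh -s   # sanity tests, keep the temp build tree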
+
+f_text_sub()
+{
+ # Switch FQCN and dependent components
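+    # e.g. references like "community.okd.openshift_route" end up as
+    # "redhat.openshift.openshift_route" in the downstream tree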
+ OKD_sed_files="${_build_dir}/README.md ${_build_dir}/CHANGELOG.rst ${_build_dir}/changelogs/config.yaml ${_build_dir}/ci/downstream.sh ${_build_dir}/galaxy.yml"
+ # shellcheck disable=SC2068
+ for okd_file in ${OKD_sed_files[@]}; do sed -i.bak "s/OKD/OpenShift/g" "${okd_file}"; done
+
+ sed -i.bak "s/============================/==================================/" "${_build_dir}/CHANGELOG.rst"
+ sed -i.bak "s/Ansible Galaxy/Automation Hub/" "${_build_dir}/README.md"
+ sed -i.bak "s/community-okd/redhat-openshift/" "${_build_dir}/Makefile"
+ sed -i.bak "s/community\/okd/redhat\/openshift/" "${_build_dir}/Makefile"
+ sed -i.bak "s/^VERSION\:/VERSION: ${DOWNSTREAM_VERSION}/" "${_build_dir}/Makefile"
+ sed -i.bak "s/name\:.*$/name: openshift/" "${_build_dir}/galaxy.yml"
+ sed -i.bak "s/namespace\:.*$/namespace: redhat/" "${_build_dir}/galaxy.yml"
+ sed -i.bak "s/Kubernetes/OpenShift/g" "${_build_dir}/galaxy.yml"
+ sed -i.bak "s/^version\:.*$/version: ${DOWNSTREAM_VERSION}/" "${_build_dir}/galaxy.yml"
+ sed -i.bak "/STARTREMOVE/,/ENDREMOVE/d" "${_build_dir}/README.md"
+ sed -i.bak "s/[[:space:]]okd:$/ openshift:/" ${_build_dir}/meta/runtime.yml
+
+ find "${_build_dir}" -type f ! -name galaxy.yml -exec sed -i.bak "s/community\.okd/redhat\.openshift/g" {} \;
+ find "${_build_dir}" -type f -name "*.bak" -delete
+}
+
+f_prep()
+{
+ f_log_info "${FUNCNAME[0]}"
+ # Array of excluded files from downstream build (relative path)
+ _file_exclude=(
+ )
+
+ # Files to copy downstream (relative repo root dir path)
+ _file_manifest=(
+ CHANGELOG.rst
+ galaxy.yml
+ LICENSE
+ README.md
+ Makefile
+ setup.cfg
+ .yamllint
+ requirements.txt
+ requirements.yml
+ test-requirements.txt
+ )
+
+ # Directories to recursively copy downstream (relative repo root dir path)
+ _dir_manifest=(
+ changelogs
+ ci
+ meta
+ molecule
+ plugins
+ tests
+ )
+
+ # Temp build dir
+ _tmp_dir=$(mktemp -d)
+ _start_dir="${PWD}"
+ _build_dir="${_tmp_dir}/ansible_collections/redhat/openshift"
+ mkdir -p "${_build_dir}"
+}
+
+
+f_cleanup()
+{
+ f_log_info "${FUNCNAME[0]}"
+ if [[ -n "${_build_dir}" ]]; then
+        # only remove the temp tree when the caller did not ask to keep it
+        if [[ -z "${KEEP_DOWNSTREAM_TMPDIR}" ]]; then
+ if [[ -d ${_build_dir} ]]; then
+ rm -fr "${_build_dir}"
+ fi
+ fi
+ else
+ exit 0
+ fi
+}
+
+# Exit and handle cleanup processes if needed
+f_exit()
+{
+ f_cleanup
+    exit "$1"
+}
+
+f_create_collection_dir_structure()
+{
+ f_log_info "${FUNCNAME[0]}"
+ # Create the Collection
+ for f_name in "${_file_manifest[@]}";
+ do
+ cp "./${f_name}" "${_build_dir}/${f_name}"
+ done
+ for d_name in "${_dir_manifest[@]}";
+ do
+ cp -r "./${d_name}" "${_build_dir}/${d_name}"
+ done
+ if [ -n "${_file_exclude:-}" ]; then
+ for exclude_file in "${_file_exclude[@]}";
+ do
+ if [[ -f "${_build_dir}/${exclude_file}" ]]; then
+ rm -f "${_build_dir}/${exclude_file}"
+ fi
+ done
+ fi
+}
+
+f_handle_doc_fragments_workaround()
+{
+ f_log_info "${FUNCNAME[0]}"
+ local install_collections_dir="${_build_dir}/collections/"
+ local temp_fragments_json="${_tmp_dir}/fragments.json"
+ local temp_start="${_tmp_dir}/startfile.txt"
+ local temp_end="${_tmp_dir}/endfile.txt"
+ local rendered_fragments="./rendereddocfragments.txt"
+
+ # FIXME: Check Python interpreter from environment variable to work with prow
+ PYTHON=${DOWNSTREAM_BUILD_PYTHON:-/usr/bin/python3.6}
+ f_log_info "Using Python interpreter: ${PYTHON}"
+
+ # Modules with inherited doc fragments from kubernetes.core that need
+ # rendering to deal with Galaxy/AH lack of functionality.
+ # shellcheck disable=SC2207
+ _doc_fragment_modules=($("${PYTHON}" "${_start_dir}/ci/doc_fragment_modules.py" -c "${_start_dir}"))
+
+ # Build the collection, export docs, render them, stitch it all back together
+ pushd "${_build_dir}" || return
+ ansible-galaxy collection build
+ ansible-galaxy collection install -p "${install_collections_dir}" ./*.tar.gz
+ rm ./*.tar.gz
+ for doc_fragment_mod in "${_doc_fragment_modules[@]}"
+ do
+ local module_py="plugins/modules/${doc_fragment_mod}.py"
+ f_log_info "Processing doc fragments for ${module_py}"
+ # We need following variable for ansible-doc only
+ # shellcheck disable=SC2097,SC2098
+ ANSIBLE_COLLECTIONS_PATH="${install_collections_dir}" \
+ ANSIBLE_COLLECTIONS_PATHS="${ANSIBLE_COLLECTIONS_PATH}:${install_collections_dir}" \
+ ansible-doc -j "redhat.openshift.${doc_fragment_mod}" > "${temp_fragments_json}"
+ "${PYTHON}" "${_start_dir}/ci/downstream_fragments.py" "redhat.openshift.${doc_fragment_mod}" "${temp_fragments_json}"
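+        # Splice the rendered docs back in: keep everything above the STARTREMOVE
+        # marker, drop everything through ENDREMOVE, and put the rendered
+        # DOCUMENTATION/EXAMPLES/RETURN blocks in between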
+ sed -n '/STARTREMOVE/q;p' "${module_py}" > "${temp_start}"
+ sed '1,/ENDREMOVE/d' "${module_py}" > "${temp_end}"
+ cat "${temp_start}" "${rendered_fragments}" "${temp_end}" > "${module_py}"
+ done
+ rm -f "${rendered_fragments}"
+ rm -fr "${install_collections_dir}"
+ popd
+
+}
+
+f_copy_collection_to_working_dir()
+{
+ f_log_info "${FUNCNAME[0]}"
+ # Copy the Collection build result into original working dir
+ f_log_info "copying built collection *.tar.gz into ./"
+ cp "${_build_dir}"/*.tar.gz ./
+ # Install downstream collection into provided path
+ if [[ -n ${INSTALL_DOWNSTREAM_COLLECTION_PATH} ]]; then
+ f_log_info "Install built collection *.tar.gz into ${INSTALL_DOWNSTREAM_COLLECTION_PATH}"
+ ansible-galaxy collection install -p "${INSTALL_DOWNSTREAM_COLLECTION_PATH}" "${_build_dir}"/*.tar.gz
+ fi
+ rm -f "${_build_dir}"/*.tar.gz
+}
+
+f_common_steps()
+{
+ f_log_info "${FUNCNAME[0]}"
+ f_prep
+ f_create_collection_dir_structure
+ f_text_sub
+ f_handle_doc_fragments_workaround
+}
+
+# Run the sanity test scenario
+f_test_sanity_option()
+{
+ f_log_info "${FUNCNAME[0]}"
+ f_common_steps
+ pushd "${_build_dir}" || return
+    f_log_info "SANITY TEST PWD: ${PWD}"
+    if command -v docker &> /dev/null
+    then
+        make sanity
+    else
+        SANITY_TEST_ARGS="--venv --color" make sanity
+    fi
+ popd || return
+ f_cleanup
+}
+
+# Run the integration tests
+f_test_integration_option()
+{
+ f_log_info "${FUNCNAME[0]}"
+ f_common_steps
+ pushd "${_build_dir}" || return
+ f_log_info "INTEGRATION TEST WD: ${PWD}"
+ make molecule
+ popd || return
+ f_cleanup
+}
+
+# Run the unit tests
+f_test_units_option()
+{
+ f_log_info "${FUNCNAME[0]}"
+ f_common_steps
+ pushd "${_build_dir}" || return
+    f_log_info "UNITS TEST PWD: ${PWD}"
+    if command -v docker &> /dev/null
+    then
+        make units
+    else
+        UNITS_TEST_ARGS="--venv --color" make units
+    fi
+ popd || return
+ f_cleanup
+}
+
+# Run the build scenario
+f_build_option()
+{
+ f_log_info "${FUNCNAME[0]}"
+ f_common_steps
+ pushd "${_build_dir}" || return
+ f_log_info "BUILD WD: ${PWD}"
+ make build
+ popd || return
+ f_copy_collection_to_working_dir
+ f_cleanup
+}
+
+# If no options are passed, display usage and exit
+if [[ "${#}" -eq "0" ]]; then
+ f_show_help
+ f_exit 0
+fi
+
+# Handle options
+while getopts ":siurb" option
+do
+ case $option in
+ s)
+ f_test_sanity_option
+ ;;
+ i)
+ f_test_integration_option
+ ;;
+ u)
+ f_test_units_option
+ ;;
+ r)
+ f_release_option
+ ;;
+ b)
+ f_build_option
+ ;;
+ *)
+ printf "ERROR: Unimplemented option chosen.\n"
+ f_show_help
+ f_exit 1
+ ;; # Default.
+ esac
+done
+
+# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
diff --git a/ansible_collections/community/okd/ci/downstream_fragments.py b/ansible_collections/community/okd/ci/downstream_fragments.py
new file mode 100755
index 000000000..727be61e8
--- /dev/null
+++ b/ansible_collections/community/okd/ci/downstream_fragments.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import json
+import sys
+import yaml
+
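+# Usage (as invoked by ci/downstream.sh):
+#   downstream_fragments.py <module FQCN> <ansible-doc JSON output file>
+# Writes the module's rendered DOCUMENTATION/EXAMPLES/RETURN blocks to
+# ./rendereddocfragments.txt so downstream.sh can splice them into the module source.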
+with open("./rendereddocfragments.txt", 'w') as df_fd:
+ with open(sys.argv[2], 'r') as fd:
+ json_docs = json.load(fd)
+
+ json_docs[sys.argv[1]]['doc'].pop('collection', '')
+ json_docs[sys.argv[1]]['doc'].pop('filename', '')
+ json_docs[sys.argv[1]]['doc'].pop('has_action', '')
+
+ df_fd.write('DOCUMENTATION = """\n')
+ df_fd.write(yaml.dump(json_docs[sys.argv[1]]['doc'], default_flow_style=False))
+ df_fd.write('"""\n\n')
+
+ df_fd.write('EXAMPLES = """')
+ df_fd.write(json_docs[sys.argv[1]]['examples'])
+ df_fd.write('"""\n\n')
+
+ df_fd.write('RETURN = r"""')
+ data = json_docs[sys.argv[1]]['return']
+ if isinstance(data, dict):
+ df_fd.write(yaml.dump(data, default_flow_style=False))
+ else:
+ df_fd.write(data)
+ df_fd.write('"""\n\n')
diff --git a/ansible_collections/community/okd/ci/incluster_integration.sh b/ansible_collections/community/okd/ci/incluster_integration.sh
new file mode 100755
index 000000000..3d2a55da6
--- /dev/null
+++ b/ansible_collections/community/okd/ci/incluster_integration.sh
@@ -0,0 +1,92 @@
+#!/bin/bash -eu
+
+set -x
+
+NAMESPACE=${NAMESPACE:-default}
+
+# IMAGE_FORMAT is in the form $registry/$org/$image:$component, e.g.
+# quay.io/openshift/release:$component
+# To test with your own image, build and push the test image
+# (using the Dockerfile in ci/Dockerfile)
+# and set the IMAGE_FORMAT environment variable so that it properly
+# resolves to your image. For example, quay.io/mynamespace/$component
+# would resolve to quay.io/mynamespace/molecule-test-runner
+# shellcheck disable=SC2034
+component='molecule-test-runner'
+if [[ -n "${MOLECULE_IMAGE:-}" ]]; then
+ IMAGE="${MOLECULE_IMAGE}"
+else
+ IMAGE="${IMAGE_FORMAT}"
+fi
+
+PULL_POLICY=${PULL_POLICY:-IfNotPresent}
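+
+# Typical (assumed) invocation from the collection root, with an explicit test image:
+#   NAMESPACE=molecule-test MOLECULE_IMAGE=quay.io/mynamespace/molecule-test-runner \
+#     ./ci/incluster_integration.sh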
+
+if ! oc get namespace "$NAMESPACE"
+then
+ oc create namespace "$NAMESPACE"
+fi
+
+oc project "$NAMESPACE"
+oc adm policy add-cluster-role-to-user cluster-admin -z default
+oc adm policy who-can create projectrequests
+
+echo "Deleting test job if it exists"
+oc delete job molecule-integration-test --wait --ignore-not-found
+
+echo "Creating molecule test job"
+cat << EOF | oc create -f -
+---
+apiVersion: batch/v1
+kind: Job
+metadata:
+ name: molecule-integration-test
+spec:
+ template:
+ spec:
+ containers:
+ - name: test-runner
+ image: ${IMAGE}
+ imagePullPolicy: ${PULL_POLICY}
+ command:
+ - make
+ - test-integration
+ restartPolicy: Never
+ backoffLimit: 2
+ completions: 1
+ parallelism: 1
+EOF
+
+function check_success {
+ oc wait --for=condition=complete job/molecule-integration-test --timeout 5s -n "$NAMESPACE" \
+ && oc logs job/molecule-integration-test \
+ && echo "Molecule integration tests ran successfully" \
+ && return 0
+ return 1
+}
+
+function check_failure {
+ oc wait --for=condition=failed job/molecule-integration-test --timeout 5s -n "$NAMESPACE" \
+ && oc logs job/molecule-integration-test \
+ && echo "Molecule integration tests failed, see logs for more information..." \
+ && return 0
+ return 1
+}
+
+runtime="30 minute"
+endtime=$(date -ud "$runtime" +%s)
+
+echo "Waiting for test job to complete"
+while [[ $(date -u +%s) -le $endtime ]]
+do
+ if check_success
+ then
+ exit 0
+ elif check_failure
+ then
+ exit 1
+ fi
+ sleep 10
+done
+
+oc logs job/molecule-integration-test
+exit 1