Diffstat (limited to 'test/integration/targets/ansible-galaxy-collection/library')
-rw-r--r--  test/integration/targets/ansible-galaxy-collection/library/reset_pulp.py         | 211
-rw-r--r--  test/integration/targets/ansible-galaxy-collection/library/setup_collections.py  | 269
2 files changed, 480 insertions, 0 deletions
diff --git a/test/integration/targets/ansible-galaxy-collection/library/reset_pulp.py b/test/integration/targets/ansible-galaxy-collection/library/reset_pulp.py
new file mode 100644
index 0000000..53c29f7
--- /dev/null
+++ b/test/integration/targets/ansible-galaxy-collection/library/reset_pulp.py
@@ -0,0 +1,211 @@
+#!/usr/bin/python
+
+# Copyright: (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: reset_pulp
+short_description: Resets pulp back to the initial state
+description:
+- Deletes all existing Pulp distributions, repositories, orphaned content, and Galaxy NG namespaces.
+- Recreates the requested repositories and namespaces so each test run starts from a known state.
+options:
+ pulp_api:
+ description:
+ - The Pulp API endpoint.
+ required: yes
+ type: str
+ galaxy_ng_server:
+ description:
+ - The Galaxy NG API endpoint.
+ required: yes
+ type: str
+ url_username:
+ description:
+ - The username to use when authenticating against Pulp.
+ required: yes
+ type: str
+ url_password:
+ description:
+ - The password to use when authenticating against Pulp.
+ required: yes
+ type: str
+ repositories:
+ description:
+ - A list of pulp repositories to create.
+ - Galaxy NG expects a repository that matches C(GALAXY_API_DEFAULT_DISTRIBUTION_BASE_PATH) in
+ C(/etc/pulp/settings.py) or the default of C(published).
+ required: yes
+ type: list
+ elements: str
+ namespaces:
+ description:
+ - A list of namespaces to create for Galaxy NG.
+ required: yes
+ type: list
+ elements: str
+author:
+- Jordan Borean (@jborean93)
+'''
+
+EXAMPLES = '''
+- name: reset pulp content
+ reset_pulp:
+ pulp_api: http://galaxy:24817
+ galaxy_ng_server: http://galaxy/api/galaxy/
+ url_username: username
+ url_password: password
+    repositories:
+    - published
+ namespaces:
+ - namespace1
+ - namespace2
+'''
+
+RETURN = '''
+#
+'''
+
+import json
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.urls import fetch_url
+from ansible.module_utils.common.text.converters import to_text
+
+
+def invoke_api(module, url, method='GET', data=None, status_codes=None):
+ status_codes = status_codes or [200]
+ headers = {}
+ if data:
+ headers['Content-Type'] = 'application/json'
+ data = json.dumps(data)
+
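+    # fetch_url reads url_username, url_password, and force_basic_auth from module.params to authenticate.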
+ resp, info = fetch_url(module, url, method=method, data=data, headers=headers)
+ if info['status'] not in status_codes:
+ module.fail_json(url=url, **info)
+
+ data = to_text(resp.read())
+ if data:
+ return json.loads(data)
+
+
+def delete_galaxy_namespace(namespace, module):
+ """ Deletes the galaxy ng namespace specified. """
+ ns_uri = '%sv3/namespaces/%s/' % (module.params['galaxy_ng_server'], namespace)
+ invoke_api(module, ns_uri, method='DELETE', status_codes=[204])
+
+
+def delete_pulp_distribution(distribution, module):
+ """ Deletes the pulp distribution at the URI specified. """
+ task_info = invoke_api(module, distribution, method='DELETE', status_codes=[202])
+ wait_pulp_task(task_info['task'], module)
+
+
+def delete_pulp_orphans(module):
+ """ Deletes any orphaned pulp objects. """
+ orphan_uri = module.params['pulp_api'] + '/pulp/api/v3/orphans/'
+ task_info = invoke_api(module, orphan_uri, method='DELETE', status_codes=[202])
+ wait_pulp_task(task_info['task'], module)
+
+
+def delete_pulp_repository(repository, module):
+ """ Deletes the pulp repository at the URI specified. """
+ task_info = invoke_api(module, repository, method='DELETE', status_codes=[202])
+ wait_pulp_task(task_info['task'], module)
+
+
+def get_galaxy_namespaces(module):
+ """ Gets a list of galaxy namespaces. """
+    # No pagination has been implemented; it shouldn't be needed unless we ever exceed 100 namespaces.
+ namespace_uri = module.params['galaxy_ng_server'] + 'v3/namespaces/?limit=100&offset=0'
+ ns_info = invoke_api(module, namespace_uri)
+
+ return [n['name'] for n in ns_info['data']]
+
+
+def get_pulp_distributions(module):
+ """ Gets a list of all the pulp distributions. """
+ distro_uri = module.params['pulp_api'] + '/pulp/api/v3/distributions/ansible/ansible/'
+ distro_info = invoke_api(module, distro_uri)
+ return [module.params['pulp_api'] + r['pulp_href'] for r in distro_info['results']]
+
+
+def get_pulp_repositories(module):
+ """ Gets a list of all the pulp repositories. """
+ repo_uri = module.params['pulp_api'] + '/pulp/api/v3/repositories/ansible/ansible/'
+ repo_info = invoke_api(module, repo_uri)
+ return [module.params['pulp_api'] + r['pulp_href'] for r in repo_info['results']]
+
+
+def new_galaxy_namespace(name, module):
+ """ Creates a new namespace in Galaxy NG. """
+ ns_uri = module.params['galaxy_ng_server'] + 'v3/_ui/namespaces/'
+    data = {'name': name,
+            'groups': [{'name': 'system:partner-engineers',
+                        'object_permissions': ['add_namespace', 'change_namespace', 'upload_to_namespace']}]}
+ ns_info = invoke_api(module, ns_uri, method='POST', data=data, status_codes=[201])
+
+ return ns_info['id']
+
+
+def new_pulp_repository(name, module):
+ """ Creates a new pulp repository. """
+ repo_uri = module.params['pulp_api'] + '/pulp/api/v3/repositories/ansible/ansible/'
+ data = {'name': name}
+ repo_info = invoke_api(module, repo_uri, method='POST', data=data, status_codes=[201])
+
+ return module.params['pulp_api'] + repo_info['pulp_href']
+
+
+def new_pulp_distribution(name, base_path, repository, module):
+ """ Creates a new pulp distribution for a repository. """
+ distro_uri = module.params['pulp_api'] + '/pulp/api/v3/distributions/ansible/ansible/'
+ data = {'name': name, 'base_path': base_path, 'repository': repository}
+ task_info = invoke_api(module, distro_uri, method='POST', data=data, status_codes=[202])
+ task_info = wait_pulp_task(task_info['task'], module)
+
+ return module.params['pulp_api'] + task_info['created_resources'][0]
+
+
+def wait_pulp_task(task, module):
+ """ Waits for a pulp import task to finish. """
+ while True:
+ task_info = invoke_api(module, module.params['pulp_api'] + task)
+ if task_info['finished_at'] is not None:
+ break
+
+ return task_info
+
+
+def main():
+ module_args = dict(
+ pulp_api=dict(type='str', required=True),
+ galaxy_ng_server=dict(type='str', required=True),
+ url_username=dict(type='str', required=True),
+ url_password=dict(type='str', required=True, no_log=True),
+ repositories=dict(type='list', elements='str', required=True),
+ namespaces=dict(type='list', elements='str', required=True),
+ )
+
+ module = AnsibleModule(
+ argument_spec=module_args,
+ supports_check_mode=False
+ )
+ module.params['force_basic_auth'] = True
+
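+    # Tear down any existing distributions, repositories, orphaned content and Galaxy NG namespaces.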
+    for distribution in get_pulp_distributions(module):
+        delete_pulp_distribution(distribution, module)
+    for repository in get_pulp_repositories(module):
+        delete_pulp_repository(repository, module)
+    delete_pulp_orphans(module)
+    for namespace in get_galaxy_namespaces(module):
+        delete_galaxy_namespace(namespace, module)
+
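+    # Recreate the requested repositories (each with a matching distribution) and Galaxy NG namespaces.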
+ for repository in module.params['repositories']:
+ repo_href = new_pulp_repository(repository, module)
+ new_pulp_distribution(repository, repository, repo_href, module)
+    for namespace in module.params['namespaces']:
+        new_galaxy_namespace(namespace, module)
+
+ module.exit_json(changed=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py b/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py
new file mode 100644
index 0000000..35b18de
--- /dev/null
+++ b/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py
@@ -0,0 +1,269 @@
+#!/usr/bin/python
+
+# Copyright: (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+ANSIBLE_METADATA = {
+ 'metadata_version': '1.1',
+ 'status': ['preview'],
+ 'supported_by': 'community'
+}
+
+DOCUMENTATION = '''
+---
+module: setup_collections
+short_description: Set up test collections based on the input
+description:
+- Builds and publishes a batch of collections used by the integration tests.
+options:
+ server:
+ description:
+ - The Galaxy server to upload the collections to.
+ required: yes
+ type: str
+ token:
+ description:
+ - The token used to authenticate with the Galaxy server.
+ required: yes
+ type: str
+ collections:
+ description:
+ - A list of collection details to use for the build.
+ required: yes
+ type: list
+ elements: dict
+ options:
+ namespace:
+ description:
+ - The namespace of the collection.
+ required: yes
+ type: str
+ name:
+ description:
+ - The name of the collection.
+ required: yes
+ type: str
+ version:
+ description:
+ - The version of the collection.
+ type: str
+ default: '1.0.0'
+ dependencies:
+ description:
+ - The dependencies of the collection.
+ type: dict
+        default: {}
+      use_symlink:
+        description:
+        - Whether to include symlinked files and directories in the collection to exercise symlink handling.
+        type: bool
+        default: no
+  signature_dir:
+    description:
+    - A directory used as the GnuPG home directory when creating a detached signature for each collection's
+      MANIFEST.json before it is published.
+    - The generated C(<namespace>-<name>-<version>-MANIFEST.json.asc) files are also written to this directory.
+    type: path
+author:
+- Jordan Borean (@jborean93)
+'''
+
+EXAMPLES = '''
+- name: Build test collections
+ setup_collections:
+    server: http://galaxy/api/galaxy/
+ collections:
+ - namespace: namespace1
+ name: name1
+ version: 0.0.1
+ - namespace: namespace1
+ name: name1
+ version: 0.0.2
+'''
+
+RETURN = '''
+#
+'''
+
+import os
+import subprocess
+import tarfile
+import tempfile
+import yaml
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_bytes
+from functools import partial
+from multiprocessing import dummy as threading
+from multiprocessing import TimeoutError
+
+
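+# Upper bound, in seconds, for building and publishing the whole batch of collections.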
+COLLECTIONS_BUILD_AND_PUBLISH_TIMEOUT = 120
+
+
+def publish_collection(module, collection):
+ namespace = collection['namespace']
+ name = collection['name']
+ version = collection['version']
+ dependencies = collection['dependencies']
+ use_symlink = collection['use_symlink']
+
+ result = {}
+ collection_dir = os.path.join(module.tmpdir, "%s-%s-%s" % (namespace, name, version))
+ b_collection_dir = to_bytes(collection_dir, errors='surrogate_or_strict')
+ os.mkdir(b_collection_dir)
+
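+    # Lay down a minimal collection skeleton (README.md and galaxy.yml) for ansible-galaxy to build.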
+ with open(os.path.join(b_collection_dir, b'README.md'), mode='wb') as fd:
+ fd.write(b"Collection readme")
+
+ galaxy_meta = {
+ 'namespace': namespace,
+ 'name': name,
+ 'version': version,
+ 'readme': 'README.md',
+        'authors': ['Collection author <name@email.com>'],
+ 'dependencies': dependencies,
+ 'license': ['GPL-3.0-or-later'],
+ 'repository': 'https://ansible.com/',
+ }
+ with open(os.path.join(b_collection_dir, b'galaxy.yml'), mode='wb') as fd:
+ fd.write(to_bytes(yaml.safe_dump(galaxy_meta), errors='surrogate_or_strict'))
+
+ with tempfile.NamedTemporaryFile(mode='wb') as temp_fd:
+ temp_fd.write(b"data")
+
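+    # When use_symlink is set, add symlinks inside the collection (including one pointing outside the
+    # collection root) so the build step has symlinked content to handle.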
+ if use_symlink:
+ os.mkdir(os.path.join(b_collection_dir, b'docs'))
+ os.mkdir(os.path.join(b_collection_dir, b'plugins'))
+ b_target_file = b'RE\xc3\x85DM\xc3\x88.md'
+ with open(os.path.join(b_collection_dir, b_target_file), mode='wb') as fd:
+ fd.write(b'data')
+
+ os.symlink(b_target_file, os.path.join(b_collection_dir, b_target_file + b'-link'))
+ os.symlink(temp_fd.name, os.path.join(b_collection_dir, b_target_file + b'-outside-link'))
+ os.symlink(os.path.join(b'..', b_target_file), os.path.join(b_collection_dir, b'docs', b_target_file))
+ os.symlink(os.path.join(b_collection_dir, b_target_file),
+ os.path.join(b_collection_dir, b'plugins', b_target_file))
+ os.symlink(b'docs', os.path.join(b_collection_dir, b'docs-link'))
+
+ release_filename = '%s-%s-%s.tar.gz' % (namespace, name, version)
+ collection_path = os.path.join(collection_dir, release_filename)
+ rc, stdout, stderr = module.run_command(['ansible-galaxy', 'collection', 'build'], cwd=collection_dir)
+ result['build'] = {
+ 'rc': rc,
+ 'stdout': stdout,
+ 'stderr': stderr,
+ }
+
+ if module.params['signature_dir'] is not None:
+ # To test user-provided signatures, we need to sign the MANIFEST.json before publishing
+
+ # Extract the tarfile to sign the MANIFEST.json
+ with tarfile.open(collection_path, mode='r') as collection_tar:
+ collection_tar.extractall(path=os.path.join(collection_dir, '%s-%s-%s' % (namespace, name, version)))
+
+ manifest_path = os.path.join(collection_dir, '%s-%s-%s' % (namespace, name, version), 'MANIFEST.json')
+ signature_path = os.path.join(module.params['signature_dir'], '%s-%s-%s-MANIFEST.json.asc' % (namespace, name, version))
+ sign_manifest(signature_path, manifest_path, module, result)
+
+ # Create the tarfile containing the signed MANIFEST.json
+ with tarfile.open(collection_path, "w:gz") as tar:
+ tar.add(os.path.join(collection_dir, '%s-%s-%s' % (namespace, name, version)), arcname=os.path.sep)
+
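+    # Publish the built artifact to the configured Galaxy server.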
+ publish_args = ['ansible-galaxy', 'collection', 'publish', collection_path, '--server', module.params['server']]
+ if module.params['token']:
+ publish_args.extend(['--token', module.params['token']])
+
+ rc, stdout, stderr = module.run_command(publish_args)
+ result['publish'] = {
+ 'rc': rc,
+ 'stdout': stdout,
+ 'stderr': stderr,
+ }
+
+ return result
+
+
+def sign_manifest(signature_path, manifest_path, module, collection_setup_result):
+ collection_setup_result['gpg_detach_sign'] = {'signature_path': signature_path}
+
+ status_fd_read, status_fd_write = os.pipe()
+ gpg_cmd = [
+ "gpg",
+ "--batch",
+ "--pinentry-mode",
+ "loopback",
+ "--yes",
+ "--homedir",
+ module.params['signature_dir'],
+ "--detach-sign",
+ "--armor",
+ "--output",
+ signature_path,
+ manifest_path,
+ ]
+ try:
+ p = subprocess.Popen(
+ gpg_cmd,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ pass_fds=(status_fd_write,),
+ encoding='utf8',
+ )
+ except (FileNotFoundError, subprocess.SubprocessError) as err:
+        collection_setup_result['gpg_detach_sign']['error'] = "Failed during GnuPG signing with command '{gpg_cmd}': {err}".format(
+ gpg_cmd=gpg_cmd, err=err
+ )
+ else:
+ stdout, stderr = p.communicate()
+ collection_setup_result['gpg_detach_sign']['stdout'] = stdout
+ if stderr:
+            error = "Failed during GnuPG signing with command '{gpg_cmd}':\n{stderr}".format(gpg_cmd=gpg_cmd, stderr=stderr)
+ collection_setup_result['gpg_detach_sign']['error'] = error
+ finally:
+ os.close(status_fd_write)
+
+
+def run_module():
+ module_args = dict(
+ server=dict(type='str', required=True),
+ token=dict(type='str'),
+ collections=dict(
+ type='list',
+ elements='dict',
+ required=True,
+ options=dict(
+ namespace=dict(type='str', required=True),
+ name=dict(type='str', required=True),
+ version=dict(type='str', default='1.0.0'),
+ dependencies=dict(type='dict', default={}),
+ use_symlink=dict(type='bool', default=False),
+ ),
+ ),
+ signature_dir=dict(type='path', default=None),
+ )
+
+ module = AnsibleModule(
+ argument_spec=module_args,
+ supports_check_mode=False
+ )
+
+ result = dict(changed=True, results=[])
+
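+    # Build and publish the collections concurrently on a small thread pool (multiprocessing.dummy).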
+ pool = threading.Pool(4)
+ publish_func = partial(publish_collection, module)
+ try:
+ result['results'] = pool.map_async(
+ publish_func, module.params['collections'],
+ ).get(timeout=COLLECTIONS_BUILD_AND_PUBLISH_TIMEOUT)
+    except TimeoutError:
+        module.fail_json(
+            msg='Timed out waiting for collections to be provisioned.',
+        )
+
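+    # Any non-zero build or publish return code marks the module run as failed.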
+ failed = bool(sum(
+ r['build']['rc'] + r['publish']['rc'] for r in result['results']
+ ))
+
+ module.exit_json(failed=failed, **result)
+
+
+def main():
+ run_module()
+
+
+if __name__ == '__main__':
+ main()