author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-18 05:52:22 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-18 05:52:22 +0000
commit     38b7c80217c4e72b1d8988eb1e60bb6e77334114 (patch)
tree       356e9fd3762877d07cde52d21e77070aeff7e789 /ansible_collections/community/general/tests/unit
parent     Adding upstream version 7.7.0+dfsg. (diff)
download   ansible-38b7c80217c4e72b1d8988eb1e60bb6e77334114.tar.xz
           ansible-38b7c80217c4e72b1d8988eb1e60bb6e77334114.zip
Adding upstream version 9.4.0+dfsg. (tag: upstream/9.4.0+dfsg)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ansible_collections/community/general/tests/unit')
-rw-r--r--  ansible_collections/community/general/tests/unit/mock/loader.py | 2
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/become/helper.py | 2
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/callback/test_loganalytics.py | 7
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/connection/test_lxc.py | 134
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/inventory/test_icinga2.py | 3
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/inventory/test_linode.py | 18
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/inventory/test_proxmox.py | 45
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/lookup/conftest.py (renamed from ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_conftest.py) | 8
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_common.py | 210
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_01.json | 4
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_04.json | 67
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_04.json.license | 3
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_05.json | 102
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_05.json.license | 3
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/lookup/test_bitwarden.py | 47
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/lookup/test_bitwarden_secrets_manager.py | 83
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/lookup/test_github_app_access_token.py | 52
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/lookup/test_merge_variables.py | 161
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/lookup/test_onepassword.py | 67
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/module_utils/hwc/test_hwc_utils.py | 25
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/module_utils/test_cmd_runner.py | 102
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/module_utils/test_module_helper.py | 80
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/module_utils/test_vardict.py | 134
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/gitlab.py | 60
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/helper.py | 181
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_cpanm.py | 277
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_cpanm.yaml | 220
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_dnf_config_manager.py | 402
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_dnsimple.py | 4
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_dnsimple_info.py | 2
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_facter_facts.py | 14
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_facter_facts.yaml | 40
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2.py | 106
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2.yaml | 117
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2_info.py | 93
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2_info.yaml | 28
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_gem.py | 2
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_gio_mime.py | 14
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_gio_mime.yaml | 70
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_gitlab_group_access_token.py | 107
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_gitlab_project_access_token.py | 107
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_hana_query.py | 103
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_ini_file.py | 51
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_ipa_otptoken.py | 22
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_ipa_pwpolicy.py | 123
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_ipbase.py | 187
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_jenkins_build.py | 40
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_jenkins_build_info.py | 180
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_authentication_required_actions.py | 835
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_client_rolemapping.py | 7
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_role.py | 370
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_user.py | 354
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_user_federation.py | 4
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_lvg_rename.py | 160
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_modprobe.py | 4
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_nmcli.py | 588
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_nomad_token.py | 222
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_npm.py | 38
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_opkg.py | 231
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_opkg.yaml | 142
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_pagerduty.py | 30
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_pagerduty_alert.py | 113
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_pkgin.py | 16
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_kvm.py | 164
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_snap.py | 8
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_storage_contents_info.py | 90
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_tasks_info.py | 10
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_template.py | 66
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_vm_info.py | 714
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_puppet.py | 211
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_puppet.yaml | 192
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_redhat_subscription.py | 4
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_redis_info.py | 36
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_rhsm_release.py | 14
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_rhsm_repository.py | 833
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_sap_task_list_execute.py | 91
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_sapcar_extract.py | 54
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_simpleinit_msb.py | 200
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_slack.py | 6
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_snap.py | 474
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_usb_facts.py | 105
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf.py | 296
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf.yaml | 185
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf_info.py | 163
-rw-r--r--  ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf_info.yaml | 83
-rw-r--r--  ansible_collections/community/general/tests/unit/requirements.txt | 14
86 files changed, 9135 insertions, 1901 deletions
diff --git a/ansible_collections/community/general/tests/unit/mock/loader.py b/ansible_collections/community/general/tests/unit/mock/loader.py
index 948f4eecd..f7aff17c3 100644
--- a/ansible_collections/community/general/tests/unit/mock/loader.py
+++ b/ansible_collections/community/general/tests/unit/mock/loader.py
@@ -17,7 +17,7 @@ class DictDataLoader(DataLoader):
def __init__(self, file_mapping=None):
file_mapping = {} if file_mapping is None else file_mapping
- assert type(file_mapping) == dict
+ assert isinstance(file_mapping, dict)
super(DictDataLoader, self).__init__()
diff --git a/ansible_collections/community/general/tests/unit/plugins/become/helper.py b/ansible_collections/community/general/tests/unit/plugins/become/helper.py
index 9949e1bef..d2a7df97f 100644
--- a/ansible_collections/community/general/tests/unit/plugins/become/helper.py
+++ b/ansible_collections/community/general/tests/unit/plugins/become/helper.py
@@ -12,7 +12,7 @@ from ansible.plugins.loader import become_loader, get_shell_plugin
def call_become_plugin(task, var_options, cmd, executable=None):
- """Helper function to call become plugin simiarly on how Ansible itself handles this."""
+ """Helper function to call become plugin similarly on how Ansible itself handles this."""
plugin = become_loader.get(task['become_method'])
plugin.set_options(task_keys=task, var_options=var_options)
shell = get_shell_plugin(executable=executable)
diff --git a/ansible_collections/community/general/tests/unit/plugins/callback/test_loganalytics.py b/ansible_collections/community/general/tests/unit/plugins/callback/test_loganalytics.py
index f9fef3c5d..17932ed5f 100644
--- a/ansible_collections/community/general/tests/unit/plugins/callback/test_loganalytics.py
+++ b/ansible_collections/community/general/tests/unit/plugins/callback/test_loganalytics.py
@@ -12,6 +12,7 @@ from ansible_collections.community.general.plugins.callback.loganalytics import
from datetime import datetime
import json
+import sys
class TestAzureLogAnalytics(unittest.TestCase):
@@ -27,6 +28,10 @@ class TestAzureLogAnalytics(unittest.TestCase):
self.mock_host = Mock('MockHost')
self.mock_host.name = 'myhost'
+ # Add backward compatibility
+ if sys.version_info < (3, 2):
+ self.assertRegex = self.assertRegexpMatches
+
@patch('ansible_collections.community.general.plugins.callback.loganalytics.datetime')
@patch('ansible_collections.community.general.plugins.callback.loganalytics.open_url')
def test_overall(self, open_url_mock, mock_datetime):
@@ -62,5 +67,5 @@ class TestAzureLogAnalytics(unittest.TestCase):
args, kwargs = open_url_mock.call_args
headers = kwargs['headers']
- self.assertRegexpMatches(headers['Authorization'], r'^SharedKey 01234567-0123-0123-0123-01234567890a:.*=$')
+ self.assertRegex(headers['Authorization'], r'^SharedKey 01234567-0123-0123-0123-01234567890a:.*=$')
self.assertEqual(headers['Log-Type'], 'ansible_playbook')
diff --git a/ansible_collections/community/general/tests/unit/plugins/connection/test_lxc.py b/ansible_collections/community/general/tests/unit/plugins/connection/test_lxc.py
index 8733a92e0..bebd42772 100644
--- a/ansible_collections/community/general/tests/unit/plugins/connection/test_lxc.py
+++ b/ansible_collections/community/general/tests/unit/plugins/connection/test_lxc.py
@@ -6,20 +6,138 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import pytest
+import sys
+
from io import StringIO
-from ansible_collections.community.general.tests.unit.compat import unittest
-from ansible_collections.community.general.plugins.connection import lxc
+from ansible.errors import AnsibleError
from ansible.playbook.play_context import PlayContext
+from ansible.plugins.loader import connection_loader
+from ansible_collections.community.general.tests.unit.compat import mock
+
+
+@pytest.fixture(autouse=True)
+def lxc(request):
+ """Fixture to import/load the lxc plugin module.
+
+ The fixture parameter is used to determine the presence of liblxc.
+ When true (default), a mocked liblxc module is injected. If False,
+ no liblxc will be present.
+ """
+ liblxc_present = getattr(request, 'param', True)
+
+ class ContainerMock():
+ # dict of container name to its state
+ _container_states = {}
+
+ def __init__(self, name):
+ super(ContainerMock, self).__init__()
+ self.name = name
+
+ @property
+ def state(self):
+ return ContainerMock._container_states.get(self.name, 'STARTED')
+
+ liblxc_module_mock = mock.MagicMock()
+ liblxc_module_mock.Container = ContainerMock
+
+ with mock.patch.dict('sys.modules'):
+ if liblxc_present:
+ sys.modules['lxc'] = liblxc_module_mock
+ elif 'lxc' in sys.modules:
+ del sys.modules['lxc']
+
+ from ansible_collections.community.general.plugins.connection import lxc as lxc_plugin_module
+
+ assert lxc_plugin_module.HAS_LIBLXC == liblxc_present
+ assert bool(getattr(lxc_plugin_module, '_lxc', None)) == liblxc_present
+
+ yield lxc_plugin_module
+
+
+class TestLXCConnectionClass():
+
+ @pytest.mark.parametrize('lxc', [True, False], indirect=True)
+ def test_lxc_connection_module(self, lxc):
+ """Test that a connection can be created with the plugin."""
+ play_context = PlayContext()
+ in_stream = StringIO()
+ conn = connection_loader.get('lxc', play_context, in_stream)
+ assert conn
+ assert isinstance(conn, lxc.Connection)
-class TestLXCConnectionClass(unittest.TestCase):
+ @pytest.mark.parametrize('lxc', [False], indirect=True)
+ def test_lxc_connection_liblxc_error(self, lxc):
+ """Test that on connect an error is thrown if liblxc is not present."""
+ play_context = PlayContext()
+ in_stream = StringIO()
+ conn = connection_loader.get('lxc', play_context, in_stream)
+
+ with pytest.raises(AnsibleError, match='lxc python bindings are not installed'):
+ conn._connect()
- def test_lxc_connection_module(self):
+ def test_remote_addr_option(self):
+ """Test that the remote_addr option is used"""
play_context = PlayContext()
- play_context.prompt = (
- '[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '
- )
in_stream = StringIO()
+ conn = connection_loader.get('lxc', play_context, in_stream)
+
+ container_name = 'my-container'
+ conn.set_option('remote_addr', container_name)
+ assert conn.get_option('remote_addr') == container_name
+
+ conn._connect()
+ assert conn.container_name == container_name
+
+ def test_error_when_stopped(self, lxc):
+ """Test that on connect an error is thrown if the container is stopped."""
+ play_context = PlayContext()
+ in_stream = StringIO()
+ conn = connection_loader.get('lxc', play_context, in_stream)
+ conn.set_option('remote_addr', 'my-container')
+
+ lxc._lxc.Container._container_states['my-container'] = 'STOPPED'
+
+ with pytest.raises(AnsibleError, match='my-container is not running'):
+ conn._connect()
+
+ def test_container_name_change(self):
+ """Test connect method reconnects when remote_addr changes"""
+ play_context = PlayContext()
+ in_stream = StringIO()
+ conn = connection_loader.get('lxc', play_context, in_stream)
+
+ # setting the option does nothing
+ container1_name = 'my-container'
+ conn.set_option('remote_addr', container1_name)
+ assert conn.container_name is None
+ assert conn.container is None
+
+ # first call initializes the connection
+ conn._connect()
+ assert conn.container_name is container1_name
+ assert conn.container is not None
+ assert conn.container.name == container1_name
+ container1 = conn.container
+
+ # second call is basically a no-op
+ conn._connect()
+ assert conn.container_name is container1_name
+ assert conn.container is container1
+ assert conn.container.name == container1_name
+
+ # setting the option does again nothing
+ container2_name = 'my-other-container'
+ conn.set_option('remote_addr', container2_name)
+ assert conn.container_name == container1_name
+ assert conn.container is container1
+ assert conn.container.name == container1_name
- self.assertIsInstance(lxc.Connection(play_context, in_stream), lxc.Connection)
+ # first call with a different remote_addr changes the connection
+ conn._connect()
+ assert conn.container_name == container2_name
+ assert conn.container is not None
+ assert conn.container is not container1
+ assert conn.container.name == container2_name
diff --git a/ansible_collections/community/general/tests/unit/plugins/inventory/test_icinga2.py b/ansible_collections/community/general/tests/unit/plugins/inventory/test_icinga2.py
index e3928b0db..859f29d3b 100644
--- a/ansible_collections/community/general/tests/unit/plugins/inventory/test_icinga2.py
+++ b/ansible_collections/community/general/tests/unit/plugins/inventory/test_icinga2.py
@@ -86,6 +86,8 @@ def get_option(option):
return {}
elif option == 'strict':
return False
+ elif option == 'group_by_hostgroups':
+ return True
else:
return None
@@ -96,6 +98,7 @@ def test_populate(inventory, mocker):
inventory.icinga2_password = 'password'
inventory.icinga2_url = 'https://localhost:5665' + '/v1'
inventory.inventory_attr = "address"
+ inventory.group_by_hostgroups = True
# bypass authentication and API fetch calls
inventory._check_api = mocker.MagicMock(side_effect=check_api)
diff --git a/ansible_collections/community/general/tests/unit/plugins/inventory/test_linode.py b/ansible_collections/community/general/tests/unit/plugins/inventory/test_linode.py
index a4f556761..0f239f2dd 100644
--- a/ansible_collections/community/general/tests/unit/plugins/inventory/test_linode.py
+++ b/ansible_collections/community/general/tests/unit/plugins/inventory/test_linode.py
@@ -37,11 +37,25 @@ def test_missing_access_token_lookup(inventory):
assert 'Could not retrieve Linode access token' in error_message
-def test_verify_file(tmp_path, inventory):
+def test_verify_file_yml(tmp_path, inventory):
file = tmp_path / "foobar.linode.yml"
file.touch()
assert inventory.verify_file(str(file)) is True
+def test_verify_file_yaml(tmp_path, inventory):
+ file = tmp_path / "foobar.linode.yaml"
+ file.touch()
+ assert inventory.verify_file(str(file)) is True
+
+
+def test_verify_file_bad_config_yml(inventory):
+ assert inventory.verify_file("foobar.linode.yml") is False
+
+
+def test_verify_file_bad_config_yaml(inventory):
+ assert inventory.verify_file("foobar.linode.yaml") is False
+
+
def test_verify_file_bad_config(inventory):
- assert inventory.verify_file('foobar.linode.yml') is False
+ assert inventory.verify_file("foobar.wrongcloud.yml") is False
diff --git a/ansible_collections/community/general/tests/unit/plugins/inventory/test_proxmox.py b/ansible_collections/community/general/tests/unit/plugins/inventory/test_proxmox.py
index 13832c938..ea6c84bcd 100644
--- a/ansible_collections/community/general/tests/unit/plugins/inventory/test_proxmox.py
+++ b/ansible_collections/community/general/tests/unit/plugins/inventory/test_proxmox.py
@@ -646,13 +646,15 @@ def test_populate(inventory, mocker):
inventory.group_prefix = 'proxmox_'
inventory.facts_prefix = 'proxmox_'
inventory.strict = False
+ inventory.exclude_nodes = False
opts = {
'group_prefix': 'proxmox_',
'facts_prefix': 'proxmox_',
'want_facts': True,
'want_proxmox_nodes_ansible_host': True,
- 'qemu_extended_statuses': True
+ 'qemu_extended_statuses': True,
+ 'exclude_nodes': False
}
# bypass authentication and API fetch calls
@@ -723,13 +725,15 @@ def test_populate_missing_qemu_extended_groups(inventory, mocker):
inventory.group_prefix = 'proxmox_'
inventory.facts_prefix = 'proxmox_'
inventory.strict = False
+ inventory.exclude_nodes = False
opts = {
'group_prefix': 'proxmox_',
'facts_prefix': 'proxmox_',
'want_facts': True,
'want_proxmox_nodes_ansible_host': True,
- 'qemu_extended_statuses': False
+ 'qemu_extended_statuses': False,
+ 'exclude_nodes': False
}
# bypass authentication and API fetch calls
@@ -743,3 +747,40 @@ def test_populate_missing_qemu_extended_groups(inventory, mocker):
# make sure that ['prelaunch', 'paused'] are not in the group list
for group in ['paused', 'prelaunch']:
assert ('%sall_%s' % (inventory.group_prefix, group)) not in inventory.inventory.groups
+
+
+def test_populate_exclude_nodes(inventory, mocker):
+ # module settings
+ inventory.proxmox_user = 'root@pam'
+ inventory.proxmox_password = 'password'
+ inventory.proxmox_url = 'https://localhost:8006'
+ inventory.group_prefix = 'proxmox_'
+ inventory.facts_prefix = 'proxmox_'
+ inventory.strict = False
+ inventory.exclude_nodes = True
+
+ opts = {
+ 'group_prefix': 'proxmox_',
+ 'facts_prefix': 'proxmox_',
+ 'want_facts': True,
+ 'want_proxmox_nodes_ansible_host': True,
+ 'qemu_extended_statuses': False,
+ 'exclude_nodes': True
+ }
+
+ # bypass authentication and API fetch calls
+ inventory._get_auth = mocker.MagicMock(side_effect=get_auth)
+ inventory._get_json = mocker.MagicMock(side_effect=get_json)
+ inventory._get_vm_snapshots = mocker.MagicMock(side_effect=get_vm_snapshots)
+ inventory.get_option = mocker.MagicMock(side_effect=get_option(opts))
+ inventory._can_add_host = mocker.MagicMock(return_value=True)
+ inventory._populate()
+
+ # make sure that nodes are not in the inventory
+ for node in ['testnode', 'testnode2']:
+ assert node not in inventory.inventory.hosts
+ # make sure that nodes group is absent
+ assert ('%s_nodes' % (inventory.group_prefix)) not in inventory.inventory.groups
+ # make sure that nodes are not in the "ungrouped" group
+ for node in ['testnode', 'testnode2']:
+ assert node not in inventory.inventory.get_groups_dict()["ungrouped"]
diff --git a/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_conftest.py b/ansible_collections/community/general/tests/unit/plugins/lookup/conftest.py
index 18afae1a3..d4ae42ab8 100644
--- a/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_conftest.py
+++ b/ansible_collections/community/general/tests/unit/plugins/lookup/conftest.py
@@ -10,17 +10,11 @@ import pytest
from ansible_collections.community.general.plugins.lookup.onepassword import OnePass
-OP_VERSION_FIXTURES = [
- "opv1",
- "opv2"
-]
-
-
@pytest.fixture
def fake_op(mocker):
def _fake_op(version):
mocker.patch("ansible_collections.community.general.plugins.lookup.onepassword.OnePassCLIBase.get_current_version", return_value=version)
- op = OnePass(None, None, None, None, None)
+ op = OnePass()
op._config._config_file_path = "/home/jin/.op/config"
mocker.patch.object(op._cli, "_run")
diff --git a/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_common.py b/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_common.py
index 092979225..bf0cc35c1 100644
--- a/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_common.py
+++ b/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_common.py
@@ -81,5 +81,215 @@ MOCK_ENTRIES = {
"expected": ["first value"],
"output": load_file("v2_out_03.json")
},
+ {
+ # Request data from an omitted value (label lookup, no section)
+ "vault_name": "Test Vault",
+ "queries": ["Omitted values"],
+ "kwargs": {
+ "field": "label-without-value",
+ },
+ "expected": [""],
+ "output": load_file("v2_out_04.json")
+ },
+ {
+ # Request data from an omitted value (id lookup, no section)
+ "vault_name": "Test Vault",
+ "queries": ["Omitted values"],
+ "kwargs": {
+ "field": "67890q7mspf4x6zrlw3qejn7m",
+ },
+ "expected": [""],
+ "output": load_file("v2_out_04.json")
+ },
+ {
+ # Request data from an omitted value (label lookup, with section)
+ "vault_name": "Test Vault",
+ "queries": ["Omitted values"],
+ "kwargs": {
+ "field": "section-label-without-value",
+ "section": "Section-Without-Values"
+ },
+ "expected": [""],
+ "output": load_file("v2_out_04.json")
+ },
+ {
+ # Request data from an omitted value (id lookup, with section)
+ "vault_name": "Test Vault",
+ "queries": ["Omitted values"],
+ "kwargs": {
+ "field": "123345q7mspf4x6zrlw3qejn7m",
+ "section": "section-without-values",
+ },
+ "expected": [""],
+ "output": load_file("v2_out_04.json")
+ },
+ {
+ # Query item without section by lowercase id (case matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "lowercaseid",
+ },
+ "expected": ["lowercaseid"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item without section by lowercase id (case not matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "LOWERCASEID",
+ },
+ "expected": ["lowercaseid"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item without section by lowercase label (case matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "lowercaselabel",
+ },
+ "expected": ["lowercaselabel"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item without section by lowercase label (case not matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "LOWERCASELABEL",
+ },
+ "expected": ["lowercaselabel"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item without section by mixed case id (case matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "MiXeDcAsEiD",
+ },
+ "expected": ["mixedcaseid"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item without section by mixed case id (case not matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "mixedcaseid",
+ },
+ "expected": ["mixedcaseid"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item without section by mixed case label (case matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "MiXeDcAsElAbEl",
+ },
+ "expected": ["mixedcaselabel"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item without section by mixed case label (case not matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "mixedcaselabel",
+ },
+ "expected": ["mixedcaselabel"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item with section by lowercase id (case matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "sectionlowercaseid",
+ "section": "section-with-values",
+ },
+ "expected": ["sectionlowercaseid"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item with section by lowercase id (case not matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "SECTIONLOWERCASEID",
+ "section": "section-with-values",
+ },
+ "expected": ["sectionlowercaseid"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item with section by lowercase label (case matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "sectionlowercaselabel",
+ "section": "section-with-values",
+ },
+ "expected": ["sectionlowercaselabel"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item with section by lowercase label (case not matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "SECTIONLOWERCASELABEL",
+ "section": "section-with-values",
+ },
+ "expected": ["sectionlowercaselabel"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item with section by lowercase id (case matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "SeCtIoNmIxEdCaSeId",
+ "section": "section-with-values",
+ },
+ "expected": ["sectionmixedcaseid"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item with section by lowercase id (case not matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "sectionmixedcaseid",
+ "section": "section-with-values",
+ },
+ "expected": ["sectionmixedcaseid"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item with section by lowercase label (case matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "SeCtIoNmIxEdCaSeLaBeL",
+ "section": "section-with-values",
+ },
+ "expected": ["sectionmixedcaselabel"],
+ "output": load_file("v2_out_05.json")
+ },
+ {
+ # Query item with section by lowercase label (case not matching)
+ "vault_name": "Test Vault",
+ "queries": ["LabelCasing"],
+ "kwargs": {
+ "field": "sectionmixedcaselabel",
+ "section": "section-with-values",
+ },
+ "expected": ["sectionmixedcaselabel"],
+ "output": load_file("v2_out_05.json")
+ },
],
}
diff --git a/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_01.json b/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_01.json
index 7ef0bb0c2..0ace5c825 100644
--- a/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_01.json
+++ b/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_01.json
@@ -13,10 +13,10 @@
"additional_information": "Jan 18, 2015, 08:13:38",
"fields": [
{
- "id": "password",
+ "id": "Password",
"type": "CONCEALED",
"purpose": "PASSWORD",
- "label": "password",
+ "label": "Password",
"value": "OctoberPoppyNuttyDraperySabbath",
"reference": "op://Test Vault/Authy Backup/password",
"password_details": {
diff --git a/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_04.json b/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_04.json
new file mode 100644
index 000000000..13b6cc2aa
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_04.json
@@ -0,0 +1,67 @@
+{
+ "id": "bgqegp3xcxnpfkb45olwigpkpi",
+ "title": "OmittedValues",
+ "version": 1,
+ "vault": {
+ "id": "stpebbaccrq72xulgouxsk4p7y",
+ "name": "Private"
+ },
+ "category": "LOGIN",
+ "last_edited_by": "WOUTERRUYBH7BFPHMZ2KKGL6AU",
+ "created_at": "2023-09-12T08:30:07Z",
+ "updated_at": "2023-09-12T08:30:07Z",
+ "additional_information": "fluxility",
+ "sections": [
+ {
+ "id": "7osqcvd43i75teocdzbb6d7mie",
+ "label": "section-without-values"
+ }
+ ],
+ "fields": [
+ {
+ "id": "username",
+ "type": "STRING",
+ "purpose": "USERNAME",
+ "label": "username",
+ "value": "fluxility",
+ "reference": "op://Testcase/OmittedValues/username"
+ },
+ {
+ "id": "password",
+ "type": "CONCEALED",
+ "purpose": "PASSWORD",
+ "label": "password",
+ "value": "j89Dyb7psat*hkbkyLUQyq@GR.a-g2pQH_V_xtMhrn37rQ_2uRYoRiozj6TjWVLy2pbfEvjnse",
+ "entropy": 427.01202392578125,
+ "reference": "op://Testcase/OmittedValues/password",
+ "password_details": {
+ "entropy": 427,
+ "generated": true,
+ "strength": "FANTASTIC"
+ }
+ },
+ {
+ "id": "notesPlain",
+ "type": "STRING",
+ "purpose": "NOTES",
+ "label": "notesPlain",
+ "reference": "op://Testcase/OmittedValues/notesPlain"
+ },
+ {
+ "id": "67890q7mspf4x6zrlw3qejn7m",
+ "type": "URL",
+ "label": "label-without-value",
+ "reference": "op://01202392578125/OmittedValues/section-without-values/section-without-value"
+ },
+ {
+ "id": "123345q7mspf4x6zrlw3qejn7m",
+ "section": {
+ "id": "6hbtca5yrlmoptgy3nw74222",
+ "label": "section-without-values"
+ },
+ "type": "URL",
+ "label": "section-label-without-value",
+ "reference": "op://01202392578125/OmittedValues/section-without-values/section-without-value"
+ }
+ ]
+}
diff --git a/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_04.json.license b/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_04.json.license
new file mode 100644
index 000000000..969b956c2
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_04.json.license
@@ -0,0 +1,3 @@
+GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+SPDX-License-Identifier: GPL-3.0-or-later
+SPDX-FileCopyrightText: 2022, Ansible Project
diff --git a/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_05.json b/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_05.json
new file mode 100644
index 000000000..f925476e1
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_05.json
@@ -0,0 +1,102 @@
+{
+ "id": "bgqegp3xcxnpfkb45olwigpkpi",
+ "title": "LabelCasing",
+ "version": 1,
+ "vault": {
+ "id": "stpebbaccrq72xulgouxsk4p7y",
+ "name": "Private"
+ },
+ "category": "LOGIN",
+ "last_edited_by": "WOUTERRUYBH7BFPHMZ2KKGL6AU",
+ "created_at": "2023-09-12T08:30:07Z",
+ "updated_at": "2023-09-12T08:30:07Z",
+ "additional_information": "fluxility",
+ "sections": [
+ {
+ "id": "7osqcvd43i75teocdzbb6d7mie",
+ "label": "section-with-values"
+ }
+ ],
+ "fields": [
+ {
+ "id": "lowercaseid",
+ "type": "STRING",
+ "purpose": "USERNAME",
+ "label": "label0",
+ "value": "lowercaseid",
+ "reference": "op://Testcase/LabelCasing/lowercase"
+ },
+ {
+ "id": "MiXeDcAsEiD",
+ "type": "STRING",
+ "purpose": "USERNAME",
+ "label": "label1",
+ "value": "mixedcaseid",
+ "reference": "op://Testcase/LabelCasing/lowercase"
+ },
+ {
+ "id": "id1",
+ "type": "STRING",
+ "purpose": "USERNAME",
+ "label": "lowercaselabel",
+ "value": "lowercaselabel",
+ "reference": "op://Testcase/LabelCasing/lowercase"
+ },
+ {
+ "id": "id2",
+ "type": "STRING",
+ "purpose": "USERNAME",
+ "label": "MiXeDcAsElAbEl",
+ "value": "mixedcaselabel",
+ "reference": "op://Testcase/LabelCasing/lowercase"
+ },
+ {
+ "id": "sectionlowercaseid",
+ "type": "STRING",
+ "purpose": "USERNAME",
+ "label": "label2",
+ "value": "sectionlowercaseid",
+ "reference": "op://Testcase/LabelCasing/lowercase",
+ "section": {
+ "id": "7osqcvd43i75teocdzbb6d7mie",
+ "label": "section-with-values"
+ }
+ },
+ {
+ "id": "SeCtIoNmIxEdCaSeId",
+ "type": "STRING",
+ "purpose": "USERNAME",
+ "label": "label3",
+ "value": "sectionmixedcaseid",
+ "reference": "op://Testcase/LabelCasing/lowercase",
+ "section": {
+ "id": "7osqcvd43i75teocdzbb6d7mie",
+ "label": "section-with-values"
+ }
+ },
+ {
+ "id": "id3",
+ "type": "STRING",
+ "purpose": "USERNAME",
+ "label": "sectionlowercaselabel",
+ "value": "sectionlowercaselabel",
+ "reference": "op://Testcase/LabelCasing/lowercase",
+ "section": {
+ "id": "7osqcvd43i75teocdzbb6d7mie",
+ "label": "section-with-values"
+ }
+ },
+ {
+ "id": "id2",
+ "type": "STRING",
+ "purpose": "USERNAME",
+ "label": "SeCtIoNmIxEdCaSeLaBeL",
+ "value": "sectionmixedcaselabel",
+ "reference": "op://Testcase/LabelCasing/lowercase",
+ "section": {
+ "id": "7osqcvd43i75teocdzbb6d7mie",
+ "label": "section-with-values"
+ }
+ }
+ ]
+}
diff --git a/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_05.json.license b/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_05.json.license
new file mode 100644
index 000000000..969b956c2
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/lookup/onepassword_fixtures/v2_out_05.json.license
@@ -0,0 +1,3 @@
+GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+SPDX-License-Identifier: GPL-3.0-or-later
+SPDX-FileCopyrightText: 2022, Ansible Project
diff --git a/ansible_collections/community/general/tests/unit/plugins/lookup/test_bitwarden.py b/ansible_collections/community/general/tests/unit/plugins/lookup/test_bitwarden.py
index d45263965..9270dd44e 100644
--- a/ansible_collections/community/general/tests/unit/plugins/lookup/test_bitwarden.py
+++ b/ansible_collections/community/general/tests/unit/plugins/lookup/test_bitwarden.py
@@ -14,10 +14,13 @@ from ansible.module_utils import six
from ansible.plugins.loader import lookup_loader
from ansible_collections.community.general.plugins.lookup.bitwarden import Bitwarden
+MOCK_COLLECTION_ID = "3b12a9da-7c49-40b8-ad33-aede017a7ead"
MOCK_RECORDS = [
{
- "collectionIds": [],
+ "collectionIds": [
+ MOCK_COLLECTION_ID
+ ],
"deletedDate": None,
"favorite": False,
"fields": [
@@ -65,7 +68,9 @@ MOCK_RECORDS = [
"type": 1
},
{
- "collectionIds": [],
+ "collectionIds": [
+ MOCK_COLLECTION_ID
+ ],
"deletedDate": None,
"favorite": False,
"folderId": None,
@@ -85,7 +90,9 @@ MOCK_RECORDS = [
"type": 1
},
{
- "collectionIds": [],
+ "collectionIds": [
+ MOCK_COLLECTION_ID
+ ],
"deletedDate": None,
"favorite": False,
"folderId": None,
@@ -111,7 +118,10 @@ class MockBitwarden(Bitwarden):
unlocked = True
- def _get_matches(self, search_value, search_field="name", collection_id=None):
+ def _get_matches(self, search_value=None, search_field="name", collection_id=None):
+ if not search_value and collection_id:
+ return list(filter(lambda record: collection_id in record['collectionIds'], MOCK_RECORDS))
+
return list(filter(lambda record: record[search_field] == search_value, MOCK_RECORDS))
@@ -156,5 +166,32 @@ class TestLookupModule(unittest.TestCase):
def test_bitwarden_plugin_unlocked(self):
record = MOCK_RECORDS[0]
record_name = record['name']
- with self.assertRaises(AnsibleError):
+ with self.assertRaises(AnsibleError) as raised_error:
self.lookup.run([record_name], field='password')
+
+ self.assertEqual("Bitwarden Vault locked. Run 'bw unlock'.", str(raised_error.exception))
+
+ def test_bitwarden_plugin_without_session_option(self):
+ mock_bitwarden = MockBitwarden()
+ with patch("ansible_collections.community.general.plugins.lookup.bitwarden._bitwarden", mock_bitwarden):
+ record = MOCK_RECORDS[0]
+ record_name = record['name']
+ session = 'session'
+
+ self.lookup.run([record_name], field=None)
+ self.assertIsNone(mock_bitwarden.session)
+
+ def test_bitwarden_plugin_session_option(self):
+ mock_bitwarden = MockBitwarden()
+ with patch("ansible_collections.community.general.plugins.lookup.bitwarden._bitwarden", mock_bitwarden):
+ record = MOCK_RECORDS[0]
+ record_name = record['name']
+ session = 'session'
+
+ self.lookup.run([record_name], field=None, bw_session=session)
+ self.assertEqual(mock_bitwarden.session, session)
+
+ @patch('ansible_collections.community.general.plugins.lookup.bitwarden._bitwarden', new=MockBitwarden())
+ def test_bitwarden_plugin_full_collection(self):
+ # Try to retrieve the full records of the given collection.
+ self.assertEqual(MOCK_RECORDS, self.lookup.run(None, collection_id=MOCK_COLLECTION_ID)[0])
diff --git a/ansible_collections/community/general/tests/unit/plugins/lookup/test_bitwarden_secrets_manager.py b/ansible_collections/community/general/tests/unit/plugins/lookup/test_bitwarden_secrets_manager.py
new file mode 100644
index 000000000..aaeaf79ea
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/lookup/test_bitwarden_secrets_manager.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023, jantari (https://github.com/jantari)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from ansible_collections.community.general.tests.unit.compat import unittest
+from ansible_collections.community.general.tests.unit.compat.mock import patch
+
+from ansible.errors import AnsibleLookupError
+from ansible.plugins.loader import lookup_loader
+from ansible_collections.community.general.plugins.lookup.bitwarden_secrets_manager import BitwardenSecretsManager
+
+
+MOCK_SECRETS = [
+ {
+ "object": "secret",
+ "id": "ababc4a8-c242-4e54-bceb-77d17cdf2e07",
+ "organizationId": "3c33066c-a0bf-4e70-9a3c-24cda6aaddd5",
+ "projectId": "81869439-bfe5-442f-8b4e-b172e68b0ab2",
+ "key": "TEST_SECRET",
+ "value": "1234supersecret5678",
+ "note": "A test secret to use when developing the ansible bitwarden_secrets_manager lookup plugin",
+ "creationDate": "2023-04-23T13:13:37.7507017Z",
+ "revisionDate": "2023-04-23T13:13:37.7507017Z"
+ },
+ {
+ "object": "secret",
+ "id": "d4b7c8fa-fc95-40d7-a13c-6e186ee69d53",
+ "organizationId": "3c33066c-a0bf-4e70-9a3c-24cda6aaddd5",
+ "projectId": "81869439-bfe5-442f-8b4e-b172e68b0ab2",
+ "key": "TEST_SECRET_2",
+ "value": "abcd_such_secret_very_important_efgh",
+ "note": "notes go here",
+ "creationDate": "2023-04-23T13:26:44.0392906Z",
+ "revisionDate": "2023-04-23T13:26:44.0392906Z"
+ }
+]
+
+
+class MockBitwardenSecretsManager(BitwardenSecretsManager):
+
+ def _run(self, args, stdin=None):
+ # secret_id is the last argument passed to the bws CLI
+ secret_id = args[-1]
+ rc = 1
+ out = ""
+ err = ""
+ found_secrets = list(filter(lambda record: record["id"] == secret_id, MOCK_SECRETS))
+
+ if len(found_secrets) == 0:
+ err = "simulated bws CLI error: 404 no secret with such id"
+ elif len(found_secrets) == 1:
+ rc = 0
+ # The real bws CLI will only ever return one secret / json object for the "get secret <secret-id>" command
+ out = json.dumps(found_secrets[0])
+ else:
+ # This should never happen unless there's an error in the test MOCK_SECRETS.
+ # The real Bitwarden Secrets Manager assigns each secret a unique ID.
+ raise ValueError("More than 1 secret found with id: '{0}'. Impossible!".format(secret_id))
+
+ return out, err, rc
+
+
+class TestLookupModule(unittest.TestCase):
+
+ def setUp(self):
+ self.lookup = lookup_loader.get('community.general.bitwarden_secrets_manager')
+
+ @patch('ansible_collections.community.general.plugins.lookup.bitwarden_secrets_manager._bitwarden_secrets_manager', new=MockBitwardenSecretsManager())
+ def test_bitwarden_secrets_manager(self):
+ # Getting a secret by its id should return the full secret info
+ self.assertEqual([MOCK_SECRETS[0]], self.lookup.run(['ababc4a8-c242-4e54-bceb-77d17cdf2e07'], bws_access_token='123'))
+
+ @patch('ansible_collections.community.general.plugins.lookup.bitwarden_secrets_manager._bitwarden_secrets_manager', new=MockBitwardenSecretsManager())
+ def test_bitwarden_secrets_manager_no_match(self):
+ # Getting a nonexistent secret id throws exception
+ with self.assertRaises(AnsibleLookupError):
+ self.lookup.run(['nonexistant_id'], bws_access_token='123')
diff --git a/ansible_collections/community/general/tests/unit/plugins/lookup/test_github_app_access_token.py b/ansible_collections/community/general/tests/unit/plugins/lookup/test_github_app_access_token.py
new file mode 100644
index 000000000..4bf9c7e70
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/lookup/test_github_app_access_token.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023, Poh Wei Sheng <weisheng-p@hotmail.sg>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import json
+
+from ansible_collections.community.general.tests.unit.compat import unittest
+from ansible_collections.community.general.tests.unit.compat.mock import (
+ patch,
+ MagicMock,
+ mock_open
+)
+from ansible.plugins.loader import lookup_loader
+
+
+class MockJWT(MagicMock):
+ def encode(self, payload, key, alg):
+ return 'Foobar'
+
+
+class MockResponse(MagicMock):
+ response_token = 'Bar'
+
+ def read(self):
+ return json.dumps({
+ "token": self.response_token,
+ }).encode('utf-8')
+
+
+class TestLookupModule(unittest.TestCase):
+
+ def test_get_token(self):
+ with patch.multiple("ansible_collections.community.general.plugins.lookup.github_app_access_token",
+ open=mock_open(read_data="foo_bar"),
+ open_url=MagicMock(return_value=MockResponse()),
+ jwk_from_pem=MagicMock(return_value='private_key'),
+ jwt_instance=MockJWT(),
+ HAS_JWT=True):
+ lookup = lookup_loader.get('community.general.github_app_access_token')
+ self.assertListEqual(
+ [MockResponse.response_token],
+ lookup.run(
+ [],
+ key_path="key",
+ app_id="app_id",
+ installation_id="installation_id",
+ token_expiry=600
+ )
+ )
diff --git a/ansible_collections/community/general/tests/unit/plugins/lookup/test_merge_variables.py b/ansible_collections/community/general/tests/unit/plugins/lookup/test_merge_variables.py
index 5085797b3..66cb2f08b 100644
--- a/ansible_collections/community/general/tests/unit/plugins/lookup/test_merge_variables.py
+++ b/ansible_collections/community/general/tests/unit/plugins/lookup/test_merge_variables.py
@@ -24,7 +24,7 @@ class TestMergeVariablesLookup(unittest.TestCase):
self.merge_vars_lookup = merge_variables.LookupModule(loader=self.loader, templar=self.templar)
@patch.object(AnsiblePlugin, 'set_options')
- @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'ignore', 'suffix'])
+ @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'ignore', 'suffix', None])
@patch.object(Templar, 'template', side_effect=[['item1'], ['item3']])
def test_merge_list(self, mock_set_options, mock_get_option, mock_template):
results = self.merge_vars_lookup.run(['__merge_list'], {
@@ -36,7 +36,7 @@ class TestMergeVariablesLookup(unittest.TestCase):
self.assertEqual(results, [['item1', 'item3']])
@patch.object(AnsiblePlugin, 'set_options')
- @patch.object(AnsiblePlugin, 'get_option', side_effect=[['initial_item'], 'ignore', 'suffix'])
+ @patch.object(AnsiblePlugin, 'get_option', side_effect=[['initial_item'], 'ignore', 'suffix', None])
@patch.object(Templar, 'template', side_effect=[['item1'], ['item3']])
def test_merge_list_with_initial_value(self, mock_set_options, mock_get_option, mock_template):
results = self.merge_vars_lookup.run(['__merge_list'], {
@@ -48,7 +48,7 @@ class TestMergeVariablesLookup(unittest.TestCase):
self.assertEqual(results, [['initial_item', 'item1', 'item3']])
@patch.object(AnsiblePlugin, 'set_options')
- @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'ignore', 'suffix'])
+ @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'ignore', 'suffix', None])
@patch.object(Templar, 'template', side_effect=[{'item1': 'test', 'list_item': ['test1']},
{'item2': 'test', 'list_item': ['test2']}])
def test_merge_dict(self, mock_set_options, mock_get_option, mock_template):
@@ -73,7 +73,7 @@ class TestMergeVariablesLookup(unittest.TestCase):
@patch.object(AnsiblePlugin, 'set_options')
@patch.object(AnsiblePlugin, 'get_option', side_effect=[{'initial_item': 'random value', 'list_item': ['test0']},
- 'ignore', 'suffix'])
+ 'ignore', 'suffix', None])
@patch.object(Templar, 'template', side_effect=[{'item1': 'test', 'list_item': ['test1']},
{'item2': 'test', 'list_item': ['test2']}])
def test_merge_dict_with_initial_value(self, mock_set_options, mock_get_option, mock_template):
@@ -98,7 +98,7 @@ class TestMergeVariablesLookup(unittest.TestCase):
])
@patch.object(AnsiblePlugin, 'set_options')
- @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'warn', 'suffix'])
+ @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'warn', 'suffix', None])
@patch.object(Templar, 'template', side_effect=[{'item': 'value1'}, {'item': 'value2'}])
@patch.object(Display, 'warning')
def test_merge_dict_non_unique_warning(self, mock_set_options, mock_get_option, mock_template, mock_display):
@@ -111,7 +111,7 @@ class TestMergeVariablesLookup(unittest.TestCase):
self.assertEqual(results, [{'item': 'value2'}])
@patch.object(AnsiblePlugin, 'set_options')
- @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'error', 'suffix'])
+ @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'error', 'suffix', None])
@patch.object(Templar, 'template', side_effect=[{'item': 'value1'}, {'item': 'value2'}])
def test_merge_dict_non_unique_error(self, mock_set_options, mock_get_option, mock_template):
with self.assertRaises(AnsibleError):
@@ -121,7 +121,7 @@ class TestMergeVariablesLookup(unittest.TestCase):
})
@patch.object(AnsiblePlugin, 'set_options')
- @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'ignore', 'suffix'])
+ @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'ignore', 'suffix', None])
@patch.object(Templar, 'template', side_effect=[{'item1': 'test', 'list_item': ['test1']},
['item2', 'item3']])
def test_merge_list_and_dict(self, mock_set_options, mock_get_option, mock_template):
@@ -133,3 +133,150 @@ class TestMergeVariablesLookup(unittest.TestCase):
},
'testdict__merge_var': ['item2', 'item3']
})
+
+ @patch.object(AnsiblePlugin, 'set_options')
+ @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'ignore', 'suffix', ['all']])
+ @patch.object(Templar, 'template', side_effect=[
+ {'var': [{'item1': 'value1', 'item2': 'value2'}]},
+ {'var': [{'item5': 'value5', 'item6': 'value6'}]},
+ ])
+ def test_merge_dict_group_all(self, mock_set_options, mock_get_option, mock_template):
+ results = self.merge_vars_lookup.run(['__merge_var'], {
+ 'inventory_hostname': 'host1',
+ 'hostvars': {
+ 'host1': {
+ 'group_names': ['dummy1'],
+ 'inventory_hostname': 'host1',
+ '1testlist__merge_var': {
+ 'var': [{'item1': 'value1', 'item2': 'value2'}]
+ }
+ },
+ 'host2': {
+ 'group_names': ['dummy1'],
+ 'inventory_hostname': 'host2',
+ '2otherlist__merge_var': {
+ 'var': [{'item5': 'value5', 'item6': 'value6'}]
+ }
+ }
+ }
+ })
+
+ self.assertEqual(results, [
+ {'var': [
+ {'item1': 'value1', 'item2': 'value2'},
+ {'item5': 'value5', 'item6': 'value6'}
+ ]}
+ ])
+
+ @patch.object(AnsiblePlugin, 'set_options')
+ @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'ignore', 'suffix', ['dummy1']])
+ @patch.object(Templar, 'template', side_effect=[
+ {'var': [{'item1': 'value1', 'item2': 'value2'}]},
+ {'var': [{'item5': 'value5', 'item6': 'value6'}]},
+ ])
+ def test_merge_dict_group_single(self, mock_set_options, mock_get_option, mock_template):
+ results = self.merge_vars_lookup.run(['__merge_var'], {
+ 'inventory_hostname': 'host1',
+ 'hostvars': {
+ 'host1': {
+ 'group_names': ['dummy1'],
+ 'inventory_hostname': 'host1',
+ '1testlist__merge_var': {
+ 'var': [{'item1': 'value1', 'item2': 'value2'}]
+ }
+ },
+ 'host2': {
+ 'group_names': ['dummy1'],
+ 'inventory_hostname': 'host2',
+ '2otherlist__merge_var': {
+ 'var': [{'item5': 'value5', 'item6': 'value6'}]
+ }
+ },
+ 'host3': {
+ 'group_names': ['dummy2'],
+ 'inventory_hostname': 'host3',
+ '3otherlist__merge_var': {
+ 'var': [{'item3': 'value3', 'item4': 'value4'}]
+ }
+ }
+ }
+ })
+
+ self.assertEqual(results, [
+ {'var': [
+ {'item1': 'value1', 'item2': 'value2'},
+ {'item5': 'value5', 'item6': 'value6'}
+ ]}
+ ])
+
+ @patch.object(AnsiblePlugin, 'set_options')
+ @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'ignore', 'suffix', ['dummy1', 'dummy2']])
+ @patch.object(Templar, 'template', side_effect=[
+ {'var': [{'item1': 'value1', 'item2': 'value2'}]},
+ {'var': [{'item5': 'value5', 'item6': 'value6'}]},
+ ])
+ def test_merge_dict_group_multiple(self, mock_set_options, mock_get_option, mock_template):
+ results = self.merge_vars_lookup.run(['__merge_var'], {
+ 'inventory_hostname': 'host1',
+ 'hostvars': {
+ 'host1': {
+ 'group_names': ['dummy1'],
+ 'inventory_hostname': 'host1',
+ '1testlist__merge_var': {
+ 'var': [{'item1': 'value1', 'item2': 'value2'}]
+ }
+ },
+ 'host2': {
+ 'group_names': ['dummy2'],
+ 'inventory_hostname': 'host2',
+ '2otherlist__merge_var': {
+ 'var': [{'item5': 'value5', 'item6': 'value6'}]
+ }
+ },
+ 'host3': {
+ 'group_names': ['dummy3'],
+ 'inventory_hostname': 'host3',
+ '3otherlist__merge_var': {
+ 'var': [{'item3': 'value3', 'item4': 'value4'}]
+ }
+ }
+ }
+ })
+
+ self.assertEqual(results, [
+ {'var': [
+ {'item1': 'value1', 'item2': 'value2'},
+ {'item5': 'value5', 'item6': 'value6'}
+ ]}
+ ])
+
+ @patch.object(AnsiblePlugin, 'set_options')
+ @patch.object(AnsiblePlugin, 'get_option', side_effect=[None, 'ignore', 'suffix', ['dummy1', 'dummy2']])
+ @patch.object(Templar, 'template', side_effect=[
+ ['item1'],
+ ['item5'],
+ ])
+ def test_merge_list_group_multiple(self, mock_set_options, mock_get_option, mock_template):
+ print()
+ results = self.merge_vars_lookup.run(['__merge_var'], {
+ 'inventory_hostname': 'host1',
+ 'hostvars': {
+ 'host1': {
+ 'group_names': ['dummy1'],
+ 'inventory_hostname': 'host1',
+ '1testlist__merge_var': ['item1']
+ },
+ 'host2': {
+ 'group_names': ['dummy2'],
+ 'inventory_hostname': 'host2',
+ '2otherlist__merge_var': ['item5']
+ },
+ 'host3': {
+ 'group_names': ['dummy3'],
+ 'inventory_hostname': 'host3',
+ '3otherlist__merge_var': ['item3']
+ }
+ }
+ })
+
+ self.assertEqual(results, [['item1', 'item5']])
diff --git a/ansible_collections/community/general/tests/unit/plugins/lookup/test_onepassword.py b/ansible_collections/community/general/tests/unit/plugins/lookup/test_onepassword.py
index ab7f3def2..dc00e5703 100644
--- a/ansible_collections/community/general/tests/unit/plugins/lookup/test_onepassword.py
+++ b/ansible_collections/community/general/tests/unit/plugins/lookup/test_onepassword.py
@@ -10,15 +10,9 @@ import itertools
import json
import pytest
-from .onepassword_conftest import ( # noqa: F401, pylint: disable=unused-import
- OP_VERSION_FIXTURES,
- fake_op,
- opv1,
- opv2,
-)
from .onepassword_common import MOCK_ENTRIES
-from ansible.errors import AnsibleLookupError
+from ansible.errors import AnsibleLookupError, AnsibleOptionsError
from ansible.plugins.loader import lookup_loader
from ansible_collections.community.general.plugins.lookup.onepassword import (
OnePassCLIv1,
@@ -26,6 +20,12 @@ from ansible_collections.community.general.plugins.lookup.onepassword import (
)
+OP_VERSION_FIXTURES = [
+ "opv1",
+ "opv2"
+]
+
+
@pytest.mark.parametrize(
("args", "rc", "expected_call_args", "expected_call_kwargs", "expected"),
(
@@ -82,6 +82,12 @@ def test_assert_logged_in_v2(mocker, args, out, expected_call_args, expected_cal
assert result == expected
+def test_assert_logged_in_v2_connect():
+ op_cli = OnePassCLIv2(connect_host="http://localhost:8080", connect_token="foobar")
+ result = op_cli.assert_logged_in()
+ assert result
+
+
def test_full_signin_v2(mocker):
mocker.patch.object(OnePassCLIv2, "_run", return_value=[0, "", ""])
@@ -264,5 +270,50 @@ def test_signin(op_fixture, request):
op = request.getfixturevalue(op_fixture)
op._cli.master_password = "master_pass"
op._cli.signin()
- print(op._cli.version)
op._cli._run.assert_called_once_with(['signin', '--raw'], command_input=b"master_pass")
+
+
+def test_op_doc(mocker):
+ document_contents = "Document Contents\n"
+
+ mocker.patch("ansible_collections.community.general.plugins.lookup.onepassword.OnePass.assert_logged_in", return_value=True)
+ mocker.patch("ansible_collections.community.general.plugins.lookup.onepassword.OnePassCLIBase._run", return_value=(0, document_contents, ""))
+
+ op_lookup = lookup_loader.get("community.general.onepassword_doc")
+ result = op_lookup.run(["Private key doc"])
+
+ assert result == [document_contents]
+
+
+@pytest.mark.parametrize(
+ ("plugin", "connect_host", "connect_token"),
+ [
+ (plugin, connect_host, connect_token)
+ for plugin in ("community.general.onepassword", "community.general.onepassword_raw")
+ for (connect_host, connect_token) in
+ (
+ ("http://localhost", None),
+ (None, "foobar"),
+ )
+ ]
+)
+def test_op_connect_partial_args(plugin, connect_host, connect_token, mocker):
+ op_lookup = lookup_loader.get(plugin)
+
+ mocker.patch("ansible_collections.community.general.plugins.lookup.onepassword.OnePass._get_cli_class", OnePassCLIv2)
+
+ with pytest.raises(AnsibleOptionsError):
+ op_lookup.run("login", vault_name="test vault", connect_host=connect_host, connect_token=connect_token)
+
+
+@pytest.mark.parametrize(
+ ("kwargs"),
+ (
+ {"connect_host": "http://localhost", "connect_token": "foobar"},
+ {"service_account_token": "foobar"},
+ )
+)
+def test_opv1_unsupported_features(kwargs):
+ op_cli = OnePassCLIv1(**kwargs)
+ with pytest.raises(AnsibleLookupError):
+ op_cli.full_signin()
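
A side note on the new Connect coverage above: test_op_connect_partial_args builds its parameter matrix as a cross product, so each plugin is exercised with every incomplete connect_host/connect_token pair. A minimal sketch of how that matrix expands (plain Python, no pytest involved; the names below are only illustrative):

    plugins = ("community.general.onepassword", "community.general.onepassword_raw")
    partial_connect_args = (("http://localhost", None), (None, "foobar"))

    # One entry per plugin/partial-option pair; each is expected to raise AnsibleOptionsError,
    # since supplying only one of connect_host/connect_token is treated as a configuration error.
    matrix = [(plugin, host, token)
              for plugin in plugins
              for (host, token) in partial_connect_args]
    assert len(matrix) == 4
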
diff --git a/ansible_collections/community/general/tests/unit/plugins/module_utils/hwc/test_hwc_utils.py b/ansible_collections/community/general/tests/unit/plugins/module_utils/hwc/test_hwc_utils.py
index 1344496b1..9b0be0bb4 100644
--- a/ansible_collections/community/general/tests/unit/plugins/module_utils/hwc/test_hwc_utils.py
+++ b/ansible_collections/community/general/tests/unit/plugins/module_utils/hwc/test_hwc_utils.py
@@ -6,11 +6,20 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import sys
+
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.module_utils.hwc_utils import (HwcModuleException, navigate_value)
class HwcUtilsTestCase(unittest.TestCase):
+ def setUp(self):
+ super(HwcUtilsTestCase, self).setUp()
+
+ # Add backward compatibility
+ if sys.version_info < (3, 0):
+ self.assertRaisesRegex = self.assertRaisesRegexp
+
def test_navigate_value(self):
value = {
'foo': {
@@ -29,12 +38,12 @@ class HwcUtilsTestCase(unittest.TestCase):
{"foo.quiet.trees": 1}),
1)
- self.assertRaisesRegexp(HwcModuleException,
- r".* key\(q\) is not exist in dict",
- navigate_value, value, ["foo", "q", "tree"])
+ self.assertRaisesRegex(HwcModuleException,
+ r".* key\(q\) is not exist in dict",
+ navigate_value, value, ["foo", "q", "tree"])
- self.assertRaisesRegexp(HwcModuleException,
- r".* the index is out of list",
- navigate_value, value,
- ["foo", "quiet", "trees"],
- {"foo.quiet.trees": 2})
+ self.assertRaisesRegex(HwcModuleException,
+ r".* the index is out of list",
+ navigate_value, value,
+ ["foo", "quiet", "trees"],
+ {"foo.quiet.trees": 2})
diff --git a/ansible_collections/community/general/tests/unit/plugins/module_utils/test_cmd_runner.py b/ansible_collections/community/general/tests/unit/plugins/module_utils/test_cmd_runner.py
index 7cec215a7..86576e8ce 100644
--- a/ansible_collections/community/general/tests/unit/plugins/module_utils/test_cmd_runner.py
+++ b/ansible_collections/community/general/tests/unit/plugins/module_utils/test_cmd_runner.py
@@ -11,44 +11,44 @@ from sys import version_info
import pytest
from ansible_collections.community.general.tests.unit.compat.mock import MagicMock, PropertyMock
-from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, fmt
+from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt
TC_FORMATS = dict(
- simple_boolean__true=(fmt.as_bool, ("--superflag",), True, ["--superflag"]),
- simple_boolean__false=(fmt.as_bool, ("--superflag",), False, []),
- simple_boolean__none=(fmt.as_bool, ("--superflag",), None, []),
- simple_boolean_both__true=(fmt.as_bool, ("--superflag", "--falseflag"), True, ["--superflag"]),
- simple_boolean_both__false=(fmt.as_bool, ("--superflag", "--falseflag"), False, ["--falseflag"]),
- simple_boolean_both__none=(fmt.as_bool, ("--superflag", "--falseflag"), None, ["--falseflag"]),
- simple_boolean_both__none_ig=(fmt.as_bool, ("--superflag", "--falseflag", True), None, []),
- simple_boolean_not__true=(fmt.as_bool_not, ("--superflag",), True, []),
- simple_boolean_not__false=(fmt.as_bool_not, ("--superflag",), False, ["--superflag"]),
- simple_boolean_not__none=(fmt.as_bool_not, ("--superflag",), None, ["--superflag"]),
- simple_optval__str=(fmt.as_optval, ("-t",), "potatoes", ["-tpotatoes"]),
- simple_optval__int=(fmt.as_optval, ("-t",), 42, ["-t42"]),
- simple_opt_val__str=(fmt.as_opt_val, ("-t",), "potatoes", ["-t", "potatoes"]),
- simple_opt_val__int=(fmt.as_opt_val, ("-t",), 42, ["-t", "42"]),
- simple_opt_eq_val__str=(fmt.as_opt_eq_val, ("--food",), "potatoes", ["--food=potatoes"]),
- simple_opt_eq_val__int=(fmt.as_opt_eq_val, ("--answer",), 42, ["--answer=42"]),
- simple_list_potato=(fmt.as_list, (), "literal_potato", ["literal_potato"]),
- simple_list_42=(fmt.as_list, (), 42, ["42"]),
- simple_map=(fmt.as_map, ({'a': 1, 'b': 2, 'c': 3},), 'b', ["2"]),
- simple_default_type__list=(fmt.as_default_type, ("list",), [1, 2, 3, 5, 8], ["--1", "--2", "--3", "--5", "--8"]),
- simple_default_type__bool_true=(fmt.as_default_type, ("bool", "what"), True, ["--what"]),
- simple_default_type__bool_false=(fmt.as_default_type, ("bool", "what"), False, []),
- simple_default_type__potato=(fmt.as_default_type, ("any-other-type", "potato"), "42", ["--potato", "42"]),
- simple_fixed_true=(fmt.as_fixed, [("--always-here", "--forever")], True, ["--always-here", "--forever"]),
- simple_fixed_false=(fmt.as_fixed, [("--always-here", "--forever")], False, ["--always-here", "--forever"]),
- simple_fixed_none=(fmt.as_fixed, [("--always-here", "--forever")], None, ["--always-here", "--forever"]),
- simple_fixed_str=(fmt.as_fixed, [("--always-here", "--forever")], "something", ["--always-here", "--forever"]),
+ simple_boolean__true=(cmd_runner_fmt.as_bool, ("--superflag",), True, ["--superflag"]),
+ simple_boolean__false=(cmd_runner_fmt.as_bool, ("--superflag",), False, []),
+ simple_boolean__none=(cmd_runner_fmt.as_bool, ("--superflag",), None, []),
+ simple_boolean_both__true=(cmd_runner_fmt.as_bool, ("--superflag", "--falseflag"), True, ["--superflag"]),
+ simple_boolean_both__false=(cmd_runner_fmt.as_bool, ("--superflag", "--falseflag"), False, ["--falseflag"]),
+ simple_boolean_both__none=(cmd_runner_fmt.as_bool, ("--superflag", "--falseflag"), None, ["--falseflag"]),
+ simple_boolean_both__none_ig=(cmd_runner_fmt.as_bool, ("--superflag", "--falseflag", True), None, []),
+ simple_boolean_not__true=(cmd_runner_fmt.as_bool_not, ("--superflag",), True, []),
+ simple_boolean_not__false=(cmd_runner_fmt.as_bool_not, ("--superflag",), False, ["--superflag"]),
+ simple_boolean_not__none=(cmd_runner_fmt.as_bool_not, ("--superflag",), None, ["--superflag"]),
+ simple_optval__str=(cmd_runner_fmt.as_optval, ("-t",), "potatoes", ["-tpotatoes"]),
+ simple_optval__int=(cmd_runner_fmt.as_optval, ("-t",), 42, ["-t42"]),
+ simple_opt_val__str=(cmd_runner_fmt.as_opt_val, ("-t",), "potatoes", ["-t", "potatoes"]),
+ simple_opt_val__int=(cmd_runner_fmt.as_opt_val, ("-t",), 42, ["-t", "42"]),
+ simple_opt_eq_val__str=(cmd_runner_fmt.as_opt_eq_val, ("--food",), "potatoes", ["--food=potatoes"]),
+ simple_opt_eq_val__int=(cmd_runner_fmt.as_opt_eq_val, ("--answer",), 42, ["--answer=42"]),
+ simple_list_potato=(cmd_runner_fmt.as_list, (), "literal_potato", ["literal_potato"]),
+ simple_list_42=(cmd_runner_fmt.as_list, (), 42, ["42"]),
+ simple_map=(cmd_runner_fmt.as_map, ({'a': 1, 'b': 2, 'c': 3},), 'b', ["2"]),
+ simple_default_type__list=(cmd_runner_fmt.as_default_type, ("list",), [1, 2, 3, 5, 8], ["--1", "--2", "--3", "--5", "--8"]),
+ simple_default_type__bool_true=(cmd_runner_fmt.as_default_type, ("bool", "what"), True, ["--what"]),
+ simple_default_type__bool_false=(cmd_runner_fmt.as_default_type, ("bool", "what"), False, []),
+ simple_default_type__potato=(cmd_runner_fmt.as_default_type, ("any-other-type", "potato"), "42", ["--potato", "42"]),
+ simple_fixed_true=(cmd_runner_fmt.as_fixed, [("--always-here", "--forever")], True, ["--always-here", "--forever"]),
+ simple_fixed_false=(cmd_runner_fmt.as_fixed, [("--always-here", "--forever")], False, ["--always-here", "--forever"]),
+ simple_fixed_none=(cmd_runner_fmt.as_fixed, [("--always-here", "--forever")], None, ["--always-here", "--forever"]),
+ simple_fixed_str=(cmd_runner_fmt.as_fixed, [("--always-here", "--forever")], "something", ["--always-here", "--forever"]),
)
if tuple(version_info) >= (3, 1):
from collections import OrderedDict
# needs OrderedDict to provide a consistent key order
TC_FORMATS["simple_default_type__dict"] = ( # type: ignore
- fmt.as_default_type,
+ cmd_runner_fmt.as_default_type,
("dict",),
OrderedDict((('a', 1), ('b', 2))),
["--a=1", "--b=2"]
@@ -76,11 +76,11 @@ TC_RUNNER = dict(
# param1=dict(
# type="int",
# value=11,
- # fmt_func=fmt.as_opt_eq_val,
+ # fmt_func=cmd_runner_fmt.as_opt_eq_val,
# fmt_arg="--answer",
# ),
# param2=dict(
- # fmt_func=fmt.as_bool,
+ # fmt_func=cmd_runner_fmt.as_bool,
# fmt_arg="--bb-here",
# )
# ),
@@ -119,8 +119,8 @@ TC_RUNNER = dict(
aa_bb=(
dict(
args_bundle=dict(
- aa=dict(type="int", value=11, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
- bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
+ aa=dict(type="int", value=11, fmt_func=cmd_runner_fmt.as_opt_eq_val, fmt_arg="--answer"),
+ bb=dict(fmt_func=cmd_runner_fmt.as_bool, fmt_arg="--bb-here"),
),
runner_init_args=dict(),
runner_ctx_args=dict(args_order=['aa', 'bb']),
@@ -137,8 +137,8 @@ TC_RUNNER = dict(
aa_bb_default_order=(
dict(
args_bundle=dict(
- aa=dict(type="int", value=11, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
- bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
+ aa=dict(type="int", value=11, fmt_func=cmd_runner_fmt.as_opt_eq_val, fmt_arg="--answer"),
+ bb=dict(fmt_func=cmd_runner_fmt.as_bool, fmt_arg="--bb-here"),
),
runner_init_args=dict(default_args_order=['bb', 'aa']),
runner_ctx_args=dict(),
@@ -155,8 +155,8 @@ TC_RUNNER = dict(
aa_bb_default_order_args_order=(
dict(
args_bundle=dict(
- aa=dict(type="int", value=11, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
- bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
+ aa=dict(type="int", value=11, fmt_func=cmd_runner_fmt.as_opt_eq_val, fmt_arg="--answer"),
+ bb=dict(fmt_func=cmd_runner_fmt.as_bool, fmt_arg="--bb-here"),
),
runner_init_args=dict(default_args_order=['bb', 'aa']),
runner_ctx_args=dict(args_order=['aa', 'bb']),
@@ -173,8 +173,8 @@ TC_RUNNER = dict(
aa_bb_dup_in_args_order=(
dict(
args_bundle=dict(
- aa=dict(type="int", value=11, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
- bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
+ aa=dict(type="int", value=11, fmt_func=cmd_runner_fmt.as_opt_eq_val, fmt_arg="--answer"),
+ bb=dict(fmt_func=cmd_runner_fmt.as_bool, fmt_arg="--bb-here"),
),
runner_init_args=dict(),
runner_ctx_args=dict(args_order=['aa', 'bb', 'aa']),
@@ -189,8 +189,8 @@ TC_RUNNER = dict(
aa_bb_process_output=(
dict(
args_bundle=dict(
- aa=dict(type="int", value=11, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
- bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
+ aa=dict(type="int", value=11, fmt_func=cmd_runner_fmt.as_opt_eq_val, fmt_arg="--answer"),
+ bb=dict(fmt_func=cmd_runner_fmt.as_bool, fmt_arg="--bb-here"),
),
runner_init_args=dict(default_args_order=['bb', 'aa']),
runner_ctx_args=dict(
@@ -209,8 +209,8 @@ TC_RUNNER = dict(
aa_bb_ignore_none_with_none=(
dict(
args_bundle=dict(
- aa=dict(type="int", value=49, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
- bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
+ aa=dict(type="int", value=49, fmt_func=cmd_runner_fmt.as_opt_eq_val, fmt_arg="--answer"),
+ bb=dict(fmt_func=cmd_runner_fmt.as_bool, fmt_arg="--bb-here"),
),
runner_init_args=dict(default_args_order=['bb', 'aa']),
runner_ctx_args=dict(
@@ -228,8 +228,8 @@ TC_RUNNER = dict(
aa_bb_ignore_not_none_with_none=(
dict(
args_bundle=dict(
- aa=dict(type="int", value=49, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
- bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
+ aa=dict(type="int", value=49, fmt_func=cmd_runner_fmt.as_opt_eq_val, fmt_arg="--answer"),
+ bb=dict(fmt_func=cmd_runner_fmt.as_bool, fmt_arg="--bb-here"),
),
runner_init_args=dict(default_args_order=['bb', 'aa']),
runner_ctx_args=dict(
@@ -247,8 +247,8 @@ TC_RUNNER = dict(
aa_bb_fixed=(
dict(
args_bundle=dict(
- aa=dict(type="int", value=11, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
- bb=dict(fmt_func=fmt.as_fixed, fmt_arg=["fixed", "args"]),
+ aa=dict(type="int", value=11, fmt_func=cmd_runner_fmt.as_opt_eq_val, fmt_arg="--answer"),
+ bb=dict(fmt_func=cmd_runner_fmt.as_fixed, fmt_arg=["fixed", "args"]),
),
runner_init_args=dict(),
runner_ctx_args=dict(args_order=['aa', 'bb']),
@@ -265,8 +265,8 @@ TC_RUNNER = dict(
aa_bb_map=(
dict(
args_bundle=dict(
- aa=dict(type="int", value=11, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
- bb=dict(fmt_func=fmt.as_map, fmt_arg={"v1": 111, "v2": 222}),
+ aa=dict(type="int", value=11, fmt_func=cmd_runner_fmt.as_opt_eq_val, fmt_arg="--answer"),
+ bb=dict(fmt_func=cmd_runner_fmt.as_map, fmt_arg={"v1": 111, "v2": 222}),
),
runner_init_args=dict(),
runner_ctx_args=dict(args_order=['aa', 'bb']),
@@ -283,8 +283,8 @@ TC_RUNNER = dict(
aa_bb_map_default=(
dict(
args_bundle=dict(
- aa=dict(type="int", value=11, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
- bb=dict(fmt_func=fmt.as_map, fmt_arg={"v1": 111, "v2": 222}),
+ aa=dict(type="int", value=11, fmt_func=cmd_runner_fmt.as_opt_eq_val, fmt_arg="--answer"),
+ bb=dict(fmt_func=cmd_runner_fmt.as_map, fmt_arg={"v1": 111, "v2": 222}),
),
runner_init_args=dict(),
runner_ctx_args=dict(args_order=['aa', 'bb']),
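
This file is purely the fmt to cmd_runner_fmt rename; the expected argument mappings themselves are unchanged. For readers skimming the table, a short restatement of a few TC_FORMATS rows as plain data (this does not call the real module, it only summarizes what the parametrized test asserts):

    # (formatter, constructor args, input value, expected argv fragment)
    EXPECTED_MAPPINGS = [
        ("as_bool",       ("--superflag",),                  True,        ["--superflag"]),
        ("as_bool",       ("--superflag",),                  False,       []),
        ("as_opt_val",    ("-t",),                           "potatoes",  ["-t", "potatoes"]),
        ("as_opt_eq_val", ("--answer",),                     42,          ["--answer=42"]),
        ("as_fixed",      (("--always-here", "--forever"),), "something", ["--always-here", "--forever"]),  # value ignored
    ]
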
diff --git a/ansible_collections/community/general/tests/unit/plugins/module_utils/test_module_helper.py b/ansible_collections/community/general/tests/unit/plugins/module_utils/test_module_helper.py
index 3d8a4b654..b2cd58690 100644
--- a/ansible_collections/community/general/tests/unit/plugins/module_utils/test_module_helper.py
+++ b/ansible_collections/community/general/tests/unit/plugins/module_utils/test_module_helper.py
@@ -10,88 +10,10 @@ __metaclass__ = type
import pytest
from ansible_collections.community.general.plugins.module_utils.module_helper import (
- ArgFormat, DependencyCtxMgr, VarMeta, VarDict, cause_changes
+ DependencyCtxMgr, VarMeta, VarDict, cause_changes
)
-def single_lambda_2star(x, y, z):
- return ["piggies=[{0},{1},{2}]".format(x, y, z)]
-
-
-ARG_FORMATS = dict(
- simple_boolean_true=("--superflag", ArgFormat.BOOLEAN, 0,
- True, ["--superflag"]),
- simple_boolean_false=("--superflag", ArgFormat.BOOLEAN, 0,
- False, []),
- simple_boolean_none=("--superflag", ArgFormat.BOOLEAN, 0,
- None, []),
- simple_boolean_not_true=("--superflag", ArgFormat.BOOLEAN_NOT, 0,
- True, []),
- simple_boolean_not_false=("--superflag", ArgFormat.BOOLEAN_NOT, 0,
- False, ["--superflag"]),
- simple_boolean_not_none=("--superflag", ArgFormat.BOOLEAN_NOT, 0,
- None, ["--superflag"]),
- single_printf=("--param=%s", ArgFormat.PRINTF, 0,
- "potatoes", ["--param=potatoes"]),
- single_printf_no_substitution=("--param", ArgFormat.PRINTF, 0,
- "potatoes", ["--param"]),
- single_printf_none=("--param=%s", ArgFormat.PRINTF, 0,
- None, []),
- multiple_printf=(["--param", "free-%s"], ArgFormat.PRINTF, 0,
- "potatoes", ["--param", "free-potatoes"]),
- single_format=("--param={0}", ArgFormat.FORMAT, 0,
- "potatoes", ["--param=potatoes"]),
- single_format_none=("--param={0}", ArgFormat.FORMAT, 0,
- None, []),
- single_format_no_substitution=("--param", ArgFormat.FORMAT, 0,
- "potatoes", ["--param"]),
- multiple_format=(["--param", "free-{0}"], ArgFormat.FORMAT, 0,
- "potatoes", ["--param", "free-potatoes"]),
- multiple_format_none=(["--param", "free-{0}"], ArgFormat.FORMAT, 0,
- None, []),
- single_lambda_0star=((lambda v: ["piggies=[{0},{1},{2}]".format(v[0], v[1], v[2])]), None, 0,
- ['a', 'b', 'c'], ["piggies=[a,b,c]"]),
- single_lambda_0star_none=((lambda v: ["piggies=[{0},{1},{2}]".format(v[0], v[1], v[2])]), None, 0,
- None, []),
- single_lambda_1star=((lambda a, b, c: ["piggies=[{0},{1},{2}]".format(a, b, c)]), None, 1,
- ['a', 'b', 'c'], ["piggies=[a,b,c]"]),
- single_lambda_1star_none=((lambda a, b, c: ["piggies=[{0},{1},{2}]".format(a, b, c)]), None, 1,
- None, []),
- single_lambda_2star=(single_lambda_2star, None, 2,
- dict(z='c', x='a', y='b'), ["piggies=[a,b,c]"]),
- single_lambda_2star_none=(single_lambda_2star, None, 2,
- None, []),
-)
-ARG_FORMATS_IDS = sorted(ARG_FORMATS.keys())
-
-
-@pytest.mark.parametrize('fmt, style, stars, value, expected',
- (ARG_FORMATS[tc] for tc in ARG_FORMATS_IDS),
- ids=ARG_FORMATS_IDS)
-def test_arg_format(fmt, style, stars, value, expected):
- af = ArgFormat('name', fmt, style, stars)
- actual = af.to_text(value)
- print("formatted string = {0}".format(actual))
- assert actual == expected, "actual = {0}".format(actual)
-
-
-ARG_FORMATS_FAIL = dict(
- int_fmt=(3, None, 0, "", [""]),
- bool_fmt=(True, None, 0, "", [""]),
-)
-ARG_FORMATS_FAIL_IDS = sorted(ARG_FORMATS_FAIL.keys())
-
-
-@pytest.mark.parametrize('fmt, style, stars, value, expected',
- (ARG_FORMATS_FAIL[tc] for tc in ARG_FORMATS_FAIL_IDS),
- ids=ARG_FORMATS_FAIL_IDS)
-def test_arg_format_fail(fmt, style, stars, value, expected):
- with pytest.raises(TypeError):
- af = ArgFormat('name', fmt, style, stars)
- actual = af.to_text(value)
- print("formatted string = {0}".format(actual))
-
-
def test_dependency_ctxmgr():
ctx = DependencyCtxMgr("POTATOES", "Potatoes must be installed")
with ctx:
diff --git a/ansible_collections/community/general/tests/unit/plugins/module_utils/test_vardict.py b/ansible_collections/community/general/tests/unit/plugins/module_utils/test_vardict.py
new file mode 100644
index 000000000..01d710b44
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/module_utils/test_vardict.py
@@ -0,0 +1,134 @@
+# -*- coding: utf-8 -*-
+# (c) 2023, Alexei Znamensky <russoz@gmail.com>
+# Copyright (c) 2023 Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+from ansible_collections.community.general.plugins.module_utils.vardict import VarDict
+
+
+def test_var_simple():
+ vd = VarDict()
+ vd["a"] = 123
+
+ var = vd._var("a")
+ assert var.output is True
+ assert var.diff is False
+ assert var.change is False
+ assert var.fact is False
+ assert var.initial_value == 123
+ assert var.value == 123
+
+ vd.a = 456
+ assert var.output is True
+ assert var.diff is False
+ assert var.change is False
+ assert var.fact is False
+ assert var.initial_value == 123
+ assert var.value == 456
+
+
+def test_var_diff_scalar():
+ vd = VarDict()
+ vd.set("aa", 123, diff=True)
+
+ var = vd._var("aa")
+ assert var.output is True
+ assert var.diff is True
+ assert var.change is True
+ assert var.fact is False
+ assert var.initial_value == 123
+ assert var.value == 123
+ assert vd.diff() is None
+
+ vd.aa = 456
+ assert var.output is True
+ assert var.diff is True
+ assert var.change is True
+ assert var.fact is False
+ assert var.initial_value == 123
+ assert var.value == 456
+ assert vd.diff() == {"before": {"aa": 123}, "after": {"aa": 456}}, "actual={0}".format(vd.diff())
+
+
+def test_var_diff_dict():
+ val_before = dict(x=0, y=10, z=10)
+ val_after = dict(x=0, y=30, z=10)
+
+ vd = VarDict()
+ vd.set("dd", val_before, diff=True)
+
+ var = vd._var("dd")
+ assert var.output is True
+ assert var.diff is True
+ assert var.change is True
+ assert var.fact is False
+ assert var.initial_value == val_before
+ assert var.value == val_before
+ assert vd.diff() is None
+
+ vd.dd = val_after
+ assert var.output is True
+ assert var.diff is True
+ assert var.change is True
+ assert var.fact is False
+ assert var.initial_value == val_before
+ assert var.value == val_after
+ assert vd.diff() == {"before": {"dd": val_before}, "after": {"dd": val_after}}, "actual={0}".format(vd.diff())
+
+ vd.set("aa", 123, diff=True)
+ vd.aa = 456
+ assert vd.diff() == {"before": {"aa": 123, "dd": val_before}, "after": {"aa": 456, "dd": val_after}}, "actual={0}".format(vd.diff())
+
+
+def test_vardict_set_meta():
+ vd = VarDict()
+ vd["jj"] = 123
+
+ var = vd._var("jj")
+ assert var.output is True
+ assert var.diff is False
+ assert var.change is False
+ assert var.fact is False
+ assert var.initial_value == 123
+ assert var.value == 123
+
+ vd.set_meta("jj", diff=True)
+ assert var.diff is True
+ assert var.change is True
+
+ vd.set_meta("jj", diff=False)
+ assert var.diff is False
+ assert var.change is False
+
+ vd.set_meta("jj", change=False)
+ vd.set_meta("jj", diff=True)
+ assert var.diff is True
+ assert var.change is False
+
+
+def test_vardict_change():
+ vd = VarDict()
+ vd.set("xx", 123, change=True)
+ vd.set("yy", 456, change=True)
+ vd.set("zz", 789, change=True)
+
+ vd.xx = 123
+ vd.yy = 456
+ assert vd.has_changed is False
+ vd.xx = 12345
+ assert vd.has_changed is True
+
+
+def test_vardict_dict():
+ vd = VarDict()
+ vd.set("xx", 123)
+ vd.set("yy", 456)
+ vd.set("zz", 789)
+
+ assert vd.as_dict() == {"xx": 123, "yy": 456, "zz": 789}
+ assert vd.get_meta("xx") == {"output": True, "change": False, "diff": False, "fact": False, "verbosity": 0}
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/gitlab.py b/ansible_collections/community/general/tests/unit/plugins/modules/gitlab.py
index c64d99fff..7a52dc355 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/gitlab.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/gitlab.py
@@ -284,6 +284,36 @@ def resp_delete_group(url, request):
return response(204, content, headers, None, 5, request)
+@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups/1/access_tokens", method="get")
+def resp_list_group_access_tokens(url, request):
+ headers = {'content-type': 'application/json'}
+ content = ('[{"user_id" : 1, "scopes" : ["api"], "name" : "token1", "expires_at" : "2021-01-31",'
+ '"id" : 1, "active" : false, "created_at" : "2021-01-20T22:11:48.151Z", "revoked" : true,'
+ '"access_level": 40},{"user_id" : 2, "scopes" : ["api"], "name" : "token2", "expires_at" : "2021-02-31",'
+ '"id" : 2, "active" : true, "created_at" : "2021-02-20T22:11:48.151Z", "revoked" : false,'
+ '"access_level": 40}]')
+ content = content.encode("utf-8")
+ return response(200, content, headers, None, 5, request)
+
+
+@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups/1/access_tokens", method="post")
+def resp_create_group_access_tokens(url, request):
+ headers = {'content-type': 'application/json'}
+ content = ('{"user_id" : 1, "scopes" : ["api"], "name" : "token1", "expires_at" : "2021-01-31",'
+ '"id" : 1, "active" : false, "created_at" : "2021-01-20T22:11:48.151Z", "revoked" : true,'
+ '"access_level": 40, "token": "Der423FErcdv35qEEWc"}')
+ content = content.encode("utf-8")
+ return response(201, content, headers, None, 5, request)
+
+
+@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups/1/access_tokens/1", method="delete")
+def resp_revoke_group_access_tokens(url, request):
+ headers = {'content-type': 'application/json'}
+ content = ('')
+ content = content.encode("utf-8")
+ return response(204, content, headers, None, 5, request)
+
+
'''
GROUP MEMBER API
'''
@@ -534,6 +564,36 @@ def resp_delete_protected_branch(url, request):
return response(204, content, headers, None, 5, request)
+@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/1/access_tokens", method="get")
+def resp_list_project_access_tokens(url, request):
+ headers = {'content-type': 'application/json'}
+ content = ('[{"user_id" : 1, "scopes" : ["api"], "name" : "token1", "expires_at" : "2021-01-31",'
+ '"id" : 1, "active" : false, "created_at" : "2021-01-20T22:11:48.151Z", "revoked" : true,'
+ '"access_level": 40},{"user_id" : 2, "scopes" : ["api"], "name" : "token2", "expires_at" : "2021-02-31",'
+ '"id" : 2, "active" : true, "created_at" : "2021-02-20T22:11:48.151Z", "revoked" : false,'
+ '"access_level": 40}]')
+ content = content.encode("utf-8")
+ return response(200, content, headers, None, 5, request)
+
+
+@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/1/access_tokens", method="post")
+def resp_create_project_access_tokens(url, request):
+ headers = {'content-type': 'application/json'}
+ content = ('{"user_id" : 1, "scopes" : ["api"], "name" : "token1", "expires_at" : "2021-01-31",'
+ '"id" : 1, "active" : false, "created_at" : "2021-01-20T22:11:48.151Z", "revoked" : true,'
+ '"access_level": 40, "token": "Der423FErcdv35qEEWc"}')
+ content = content.encode("utf-8")
+ return response(201, content, headers, None, 5, request)
+
+
+@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/1/access_tokens/1", method="delete")
+def resp_revoke_project_access_tokens(url, request):
+ headers = {'content-type': 'application/json'}
+ content = ('')
+ content = content.encode("utf-8")
+ return response(204, content, headers, None, 5, request)
+
+
'''
HOOK API
'''
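
The new access-token responders follow the same httmock pattern as the rest of this file: each @urlmatch function is activated inside an HTTMock context and answers the matching request with canned JSON. A minimal sketch of driving one of them directly with requests (the helper function is only illustrative; the real tests go through the gitlab modules):

    import requests
    from httmock import HTTMock

    def list_group_access_tokens_example():
        with HTTMock(resp_list_group_access_tokens):
            reply = requests.get("http://localhost/api/v4/groups/1/access_tokens")
        return [token["name"] for token in reply.json()]

    # expected: ["token1", "token2"]
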
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/helper.py b/ansible_collections/community/general/tests/unit/plugins/modules/helper.py
new file mode 100644
index 000000000..a7322bf4d
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/helper.py
@@ -0,0 +1,181 @@
+# Copyright (c) Ansible project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import sys
+import json
+from collections import namedtuple
+from itertools import chain, repeat
+
+import pytest
+import yaml
+
+
+ModuleTestCase = namedtuple("ModuleTestCase", ["id", "input", "output", "run_command_calls", "flags"])
+RunCmdCall = namedtuple("RunCmdCall", ["command", "environ", "rc", "out", "err"])
+
+
+class _BaseContext(object):
+ def __init__(self, helper, testcase, mocker, capfd):
+ self.helper = helper
+ self.testcase = testcase
+ self.mocker = mocker
+ self.capfd = capfd
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ return False
+
+ def _run(self):
+ with pytest.raises(SystemExit):
+ self.helper.module_main()
+
+ out, err = self.capfd.readouterr()
+ results = json.loads(out)
+
+ self.check_results(results)
+
+ def test_flags(self, flag=None):
+ flags = self.testcase.flags
+ if flag:
+ flags = flags.get(flag)
+ return flags
+
+ def run(self):
+ func = self._run
+
+ test_flags = self.test_flags()
+ if test_flags.get("skip"):
+ pytest.skip()
+ if test_flags.get("xfail"):
+ pytest.xfail()
+
+ func()
+
+ def check_results(self, results):
+ print("testcase =\n%s" % str(self.testcase))
+ print("results =\n%s" % results)
+ if 'exception' in results:
+ print("exception = \n%s" % results["exception"])
+
+ for test_result in self.testcase.output:
+ assert results[test_result] == self.testcase.output[test_result], \
+ "'{0}': '{1}' != '{2}'".format(test_result, results[test_result], self.testcase.output[test_result])
+
+
+class _RunCmdContext(_BaseContext):
+ def __init__(self, *args, **kwargs):
+ super(_RunCmdContext, self).__init__(*args, **kwargs)
+ self.run_cmd_calls = self.testcase.run_command_calls
+ self.mock_run_cmd = self._make_mock_run_cmd()
+
+ def _make_mock_run_cmd(self):
+ call_results = [(x.rc, x.out, x.err) for x in self.run_cmd_calls]
+ error_call_results = (123,
+ "OUT: testcase has not enough run_command calls",
+ "ERR: testcase has not enough run_command calls")
+ mock_run_command = self.mocker.patch('ansible.module_utils.basic.AnsibleModule.run_command',
+ side_effect=chain(call_results, repeat(error_call_results)))
+ return mock_run_command
+
+ def check_results(self, results):
+ super(_RunCmdContext, self).check_results(results)
+ call_args_list = [(item[0][0], item[1]) for item in self.mock_run_cmd.call_args_list]
+ expected_call_args_list = [(item.command, item.environ) for item in self.run_cmd_calls]
+ print("call args list =\n%s" % call_args_list)
+ print("expected args list =\n%s" % expected_call_args_list)
+
+ assert self.mock_run_cmd.call_count == len(self.run_cmd_calls), "{0} != {1}".format(self.mock_run_cmd.call_count, len(self.run_cmd_calls))
+ if self.mock_run_cmd.call_count:
+ assert call_args_list == expected_call_args_list
+
+
+class Helper(object):
+ @staticmethod
+ def from_list(module_main, list_):
+ helper = Helper(module_main, test_cases=list_)
+ return helper
+
+ @staticmethod
+ def from_file(module_main, filename):
+ with open(filename, "r") as test_cases:
+ helper = Helper(module_main, test_cases=test_cases)
+ return helper
+
+ @staticmethod
+ def from_module(module, test_module_name):
+ basename = module.__name__.split(".")[-1]
+ test_spec = "tests/unit/plugins/modules/test_{0}.yaml".format(basename)
+ helper = Helper.from_file(module.main, test_spec)
+
+ setattr(sys.modules[test_module_name], "patch_bin", helper.cmd_fixture)
+ setattr(sys.modules[test_module_name], "test_module", helper.test_module)
+
+ def __init__(self, module_main, test_cases):
+ self.module_main = module_main
+ self._test_cases = test_cases
+ if isinstance(test_cases, (list, tuple)):
+ self.testcases = test_cases
+ else:
+ self.testcases = self._make_test_cases()
+
+ @property
+ def cmd_fixture(self):
+ @pytest.fixture
+ def patch_bin(mocker):
+ def mockie(self, path, *args, **kwargs):
+ return "/testbin/{0}".format(path)
+ mocker.patch('ansible.module_utils.basic.AnsibleModule.get_bin_path', mockie)
+
+ return patch_bin
+
+ def _make_test_cases(self):
+ test_cases = yaml.safe_load(self._test_cases)
+
+ results = []
+ for tc in test_cases:
+ for tc_param in ["input", "output", "flags"]:
+ if not tc.get(tc_param):
+ tc[tc_param] = {}
+ if tc.get("run_command_calls"):
+ tc["run_command_calls"] = [RunCmdCall(**r) for r in tc["run_command_calls"]]
+ else:
+ tc["run_command_calls"] = []
+ results.append(ModuleTestCase(**tc))
+
+ return results
+
+ @property
+ def testcases_params(self):
+ return [[x.input, x] for x in self.testcases]
+
+ @property
+ def testcases_ids(self):
+ return [item.id for item in self.testcases]
+
+ def __call__(self, *args, **kwargs):
+ return _RunCmdContext(self, *args, **kwargs)
+
+ @property
+ def test_module(self):
+ helper = self
+
+ @pytest.mark.parametrize('patch_ansible_module, testcase',
+ helper.testcases_params, ids=helper.testcases_ids,
+ indirect=['patch_ansible_module'])
+ @pytest.mark.usefixtures('patch_ansible_module')
+ def _test_module(mocker, capfd, patch_bin, testcase):
+ """
+ Run unit tests for the test cases provided to the Helper
+ """
+
+ with helper(testcase, mocker, capfd) as testcase_context:
+ testcase_context.run()
+
+ return _test_module
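
The net effect of the new helper: a module's test file shrinks to a couple of lines, with the test cases themselves moving to a YAML spec named after the module (ModuleTestCase fields: id, input, output, run_command_calls, flags). A minimal sketch of the adoption pattern, with my_module as a placeholder; the real conversion for cpanm follows in the next two hunks:

    from ansible_collections.community.general.plugins.modules import my_module
    from .helper import Helper

    # Reads tests/unit/plugins/modules/test_my_module.yaml and injects the 'patch_bin'
    # fixture and the parametrized 'test_module' test into this test module's namespace.
    Helper.from_module(my_module, __name__)
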
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_cpanm.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_cpanm.py
index 5367a1fab..4eecf000f 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_cpanm.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_cpanm.py
@@ -12,282 +12,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import json
from ansible_collections.community.general.plugins.modules import cpanm
+from .helper import Helper
-import pytest
-TESTED_MODULE = cpanm.__name__
-
-
-@pytest.fixture
-def patch_cpanm(mocker):
- """
- Function used for mocking some parts of redhat_subscription module
- """
- mocker.patch('ansible.module_utils.basic.AnsibleModule.get_bin_path',
- return_value='/testbin/cpanm')
-
-
-TEST_CASES = [
- [
- {'name': 'Dancer'},
- {
- 'id': 'install_dancer_compatibility',
- 'run_command.calls': [
- (
- ['/testbin/cpanm', '-le', 'use Dancer;'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- (2, '', 'error, not installed',), # output rc, out, err
- ),
- (
- ['/testbin/cpanm', 'Dancer'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', '',), # output rc, out, err
- ),
- ],
- 'changed': True,
- }
- ],
- [
- {'name': 'Dancer'},
- {
- 'id': 'install_dancer_already_installed_compatibility',
- 'run_command.calls': [
- (
- ['/testbin/cpanm', '-le', 'use Dancer;'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- (0, '', '',), # output rc, out, err
- ),
- ],
- 'changed': False,
- }
- ],
- [
- {'name': 'Dancer', 'mode': 'new'},
- {
- 'id': 'install_dancer',
- 'run_command.calls': [(
- ['/testbin/cpanm', 'Dancer'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', '',), # output rc, out, err
- )],
- 'changed': True,
- }
- ],
- [
- {'name': 'MIYAGAWA/Plack-0.99_05.tar.gz'},
- {
- 'id': 'install_distribution_file_compatibility',
- 'run_command.calls': [(
- ['/testbin/cpanm', 'MIYAGAWA/Plack-0.99_05.tar.gz'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', '',), # output rc, out, err
- )],
- 'changed': True,
- }
- ],
- [
- {'name': 'MIYAGAWA/Plack-0.99_05.tar.gz', 'mode': 'new'},
- {
- 'id': 'install_distribution_file',
- 'run_command.calls': [(
- ['/testbin/cpanm', 'MIYAGAWA/Plack-0.99_05.tar.gz'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', '',), # output rc, out, err
- )],
- 'changed': True,
- }
- ],
- [
- {'name': 'Dancer', 'locallib': '/srv/webapps/my_app/extlib', 'mode': 'new'},
- {
- 'id': 'install_into_locallib',
- 'run_command.calls': [(
- ['/testbin/cpanm', '--local-lib', '/srv/webapps/my_app/extlib', 'Dancer'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', '',), # output rc, out, err
- )],
- 'changed': True,
- }
- ],
- [
- {'from_path': '/srv/webapps/my_app/src/', 'mode': 'new'},
- {
- 'id': 'install_from_local_directory',
- 'run_command.calls': [(
- ['/testbin/cpanm', '/srv/webapps/my_app/src/'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', '',), # output rc, out, err
- )],
- 'changed': True,
- }
- ],
- [
- {'name': 'Dancer', 'locallib': '/srv/webapps/my_app/extlib', 'notest': True, 'mode': 'new'},
- {
- 'id': 'install_into_locallib_no_unit_testing',
- 'run_command.calls': [(
- ['/testbin/cpanm', '--notest', '--local-lib', '/srv/webapps/my_app/extlib', 'Dancer'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', '',), # output rc, out, err
- )],
- 'changed': True,
- }
- ],
- [
- {'name': 'Dancer', 'mirror': 'http://cpan.cpantesters.org/', 'mode': 'new'},
- {
- 'id': 'install_from_mirror',
- 'run_command.calls': [(
- ['/testbin/cpanm', '--mirror', 'http://cpan.cpantesters.org/', 'Dancer'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', '',), # output rc, out, err
- )],
- 'changed': True,
- }
- ],
- [
- {'name': 'Dancer', 'system_lib': True, 'mode': 'new'},
- {
- 'id': 'install_into_system_lib',
- 'run_command.calls': [],
- 'changed': False,
- 'failed': True,
- }
- ],
- [
- {'name': 'Dancer', 'version': '1.0', 'mode': 'new'},
- {
- 'id': 'install_minversion_implicit',
- 'run_command.calls': [(
- ['/testbin/cpanm', 'Dancer~1.0'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', '',), # output rc, out, err
- )],
- 'changed': True,
- }
- ],
- [
- {'name': 'Dancer', 'version': '~1.5', 'mode': 'new'},
- {
- 'id': 'install_minversion_explicit',
- 'run_command.calls': [(
- ['/testbin/cpanm', 'Dancer~1.5'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', '',), # output rc, out, err
- )],
- 'changed': True,
- }
- ],
- [
- {'name': 'Dancer', 'version': '@1.7', 'mode': 'new'},
- {
- 'id': 'install_specific_version',
- 'run_command.calls': [(
- ['/testbin/cpanm', 'Dancer@1.7'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', '',), # output rc, out, err
- )],
- 'changed': True,
- 'failed': False,
- }
- ],
- [
- {'name': 'MIYAGAWA/Plack-0.99_05.tar.gz', 'version': '@1.7', 'mode': 'new'},
- {
- 'id': 'install_specific_version_from_file_error',
- 'run_command.calls': [],
- 'changed': False,
- 'failed': True,
- 'msg': "parameter 'version' must not be used when installing from a file",
- }
- ],
- [
- {'from_path': '~/', 'version': '@1.7', 'mode': 'new'},
- {
- 'id': 'install_specific_version_from_directory_error',
- 'run_command.calls': [],
- 'changed': False,
- 'failed': True,
- 'msg': "parameter 'version' must not be used when installing from a directory",
- }
- ],
- [
- {'name': 'git://github.com/plack/Plack.git', 'version': '@1.7', 'mode': 'new'},
- {
- 'id': 'install_specific_version_from_git_url_explicit',
- 'run_command.calls': [(
- ['/testbin/cpanm', 'git://github.com/plack/Plack.git@1.7'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', '',), # output rc, out, err
- )],
- 'changed': True,
- 'failed': False,
- }
- ],
- [
- {'name': 'git://github.com/plack/Plack.git', 'version': '2.5', 'mode': 'new'},
- {
- 'id': 'install_specific_version_from_git_url_implicit',
- 'run_command.calls': [(
- ['/testbin/cpanm', 'git://github.com/plack/Plack.git@2.5'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', '',), # output rc, out, err
- )],
- 'changed': True,
- 'failed': False,
- }
- ],
- [
- {'name': 'git://github.com/plack/Plack.git', 'version': '~2.5', 'mode': 'new'},
- {
- 'id': 'install_version_operator_from_git_url_error',
- 'run_command.calls': [],
- 'changed': False,
- 'failed': True,
- 'msg': "operator '~' not allowed in version parameter when installing from git repository",
- }
- ],
-]
-TEST_CASES_IDS = [item[1]['id'] for item in TEST_CASES]
-
-
-@pytest.mark.parametrize('patch_ansible_module, testcase',
- TEST_CASES,
- ids=TEST_CASES_IDS,
- indirect=['patch_ansible_module'])
-@pytest.mark.usefixtures('patch_ansible_module')
-def test_cpanm(mocker, capfd, patch_cpanm, testcase):
- """
- Run unit tests for test cases listen in TEST_CASES
- """
-
- # Mock function used for running commands first
- call_results = [item[2] for item in testcase['run_command.calls']]
- mock_run_command = mocker.patch(
- 'ansible_collections.community.general.plugins.module_utils.module_helper.AnsibleModule.run_command',
- side_effect=call_results)
-
- # Try to run test case
- with pytest.raises(SystemExit):
- cpanm.main()
-
- out, err = capfd.readouterr()
- results = json.loads(out)
- print("results =\n%s" % results)
-
- assert mock_run_command.call_count == len(testcase['run_command.calls'])
- if mock_run_command.call_count:
- call_args_list = [(item[0][0], item[1]) for item in mock_run_command.call_args_list]
- expected_call_args_list = [(item[0], item[1]) for item in testcase['run_command.calls']]
- print("call args list =\n%s" % call_args_list)
- print("expected args list =\n%s" % expected_call_args_list)
- assert call_args_list == expected_call_args_list
-
- assert results.get('changed', False) == testcase['changed']
- if 'failed' in testcase:
- assert results.get('failed', False) == testcase['failed']
- if 'msg' in testcase:
- assert results.get('msg', '') == testcase['msg']
+Helper.from_module(cpanm, __name__)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_cpanm.yaml b/ansible_collections/community/general/tests/unit/plugins/modules/test_cpanm.yaml
new file mode 100644
index 000000000..3ed718d48
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_cpanm.yaml
@@ -0,0 +1,220 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+---
+- id: install_dancer_compatibility
+ input:
+ name: Dancer
+ output:
+ changed: true
+ run_command_calls:
+ - command: [/testbin/perl, -le, 'use Dancer;']
+ environ: &env-def-false {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: false}
+ rc: 2
+ out: ""
+ err: "error, not installed"
+ - command: [/testbin/cpanm, Dancer]
+ environ: &env-def-true {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true}
+ rc: 0
+ out: ""
+ err: ""
+- id: install_dancer_already_installed_compatibility
+ input:
+ name: Dancer
+ output:
+ changed: false
+ run_command_calls:
+ - command: [/testbin/perl, -le, 'use Dancer;']
+ environ: *env-def-false
+ rc: 0
+ out: ""
+ err: ""
+- id: install_dancer
+ input:
+ name: Dancer
+ mode: new
+ output:
+ changed: true
+ run_command_calls:
+ - command: [/testbin/cpanm, Dancer]
+ environ: *env-def-true
+ rc: 0
+ out: ""
+ err: ""
+- id: install_distribution_file_compatibility
+ input:
+ name: MIYAGAWA/Plack-0.99_05.tar.gz
+ output:
+ changed: true
+ run_command_calls:
+ - command: [/testbin/cpanm, MIYAGAWA/Plack-0.99_05.tar.gz]
+ environ: *env-def-true
+ rc: 0
+ out: ""
+ err: ""
+- id: install_distribution_file
+ input:
+ name: MIYAGAWA/Plack-0.99_05.tar.gz
+ mode: new
+ output:
+ changed: true
+ run_command_calls:
+ - command: [/testbin/cpanm, MIYAGAWA/Plack-0.99_05.tar.gz]
+ environ: *env-def-true
+ rc: 0
+ out: ""
+ err: ""
+- id: install_into_locallib
+ input:
+ name: Dancer
+ mode: new
+ locallib: /srv/webapps/my_app/extlib
+ output:
+ changed: true
+ run_command_calls:
+ - command: [/testbin/cpanm, --local-lib, /srv/webapps/my_app/extlib, Dancer]
+ environ: *env-def-true
+ rc: 0
+ out: ""
+ err: ""
+- id: install_from_local_directory
+ input:
+ from_path: /srv/webapps/my_app/src/
+ mode: new
+ output:
+ changed: true
+ run_command_calls:
+ - command: [/testbin/cpanm, /srv/webapps/my_app/src/]
+ environ: *env-def-true
+ rc: 0
+ out: ""
+ err: ""
+- id: install_into_locallib_no_unit_testing
+ input:
+ name: Dancer
+ notest: true
+ mode: new
+ locallib: /srv/webapps/my_app/extlib
+ output:
+ changed: true
+ run_command_calls:
+ - command: [/testbin/cpanm, --notest, --local-lib, /srv/webapps/my_app/extlib, Dancer]
+ environ: *env-def-true
+ rc: 0
+ out: ""
+ err: ""
+- id: install_from_mirror
+ input:
+ name: Dancer
+ mode: new
+ mirror: "http://cpan.cpantesters.org/"
+ output:
+ changed: true
+ run_command_calls:
+ - command: [/testbin/cpanm, --mirror, "http://cpan.cpantesters.org/", Dancer]
+ environ: *env-def-true
+ rc: 0
+ out: ""
+ err: ""
+- id: install_into_system_lib
+ input:
+ name: Dancer
+ mode: new
+ system_lib: true
+ output:
+ failed: true
+ run_command_calls: []
+- id: install_minversion_implicit
+ input:
+ name: Dancer
+ mode: new
+ version: "1.0"
+ output:
+ changed: true
+ run_command_calls:
+ - command: [/testbin/cpanm, Dancer~1.0]
+ environ: *env-def-true
+ rc: 0
+ out: ""
+ err: ""
+- id: install_minversion_explicit
+ input:
+ name: Dancer
+ mode: new
+ version: "~1.5"
+ output:
+ changed: true
+ run_command_calls:
+ - command: [/testbin/cpanm, Dancer~1.5]
+ environ: *env-def-true
+ rc: 0
+ out: ""
+ err: ""
+- id: install_specific_version
+ input:
+ name: Dancer
+ mode: new
+ version: "@1.7"
+ output:
+ changed: true
+ run_command_calls:
+ - command: [/testbin/cpanm, Dancer@1.7]
+ environ: *env-def-true
+ rc: 0
+ out: ""
+ err: ""
+- id: install_specific_version_from_file_error
+ input:
+ name: MIYAGAWA/Plack-0.99_05.tar.gz
+ mode: new
+ version: "@1.7"
+ output:
+ failed: true
+ msg: parameter 'version' must not be used when installing from a file
+ run_command_calls: []
+- id: install_specific_version_from_directory_error
+ input:
+ from_path: ~/
+ mode: new
+ version: "@1.7"
+ output:
+ failed: true
+ msg: parameter 'version' must not be used when installing from a directory
+ run_command_calls: []
+- id: install_specific_version_from_git_url_explicit
+ input:
+ name: "git://github.com/plack/Plack.git"
+ mode: new
+ version: "@1.7"
+ output:
+ changed: true
+ run_command_calls:
+ - command: [/testbin/cpanm, "git://github.com/plack/Plack.git@1.7"]
+ environ: *env-def-true
+ rc: 0
+ out: ""
+ err: ""
+- id: install_specific_version_from_git_url_implicit
+ input:
+ name: "git://github.com/plack/Plack.git"
+ mode: new
+ version: "2.5"
+ output:
+ changed: true
+ run_command_calls:
+ - command: [/testbin/cpanm, "git://github.com/plack/Plack.git@2.5"]
+ environ: *env-def-true
+ rc: 0
+ out: ""
+ err: ""
+- id: install_version_operator_from_git_url_error
+ input:
+ name: "git://github.com/plack/Plack.git"
+ mode: new
+ version: "~2.5"
+ output:
+ failed: true
+ msg: operator '~' not allowed in version parameter when installing from git repository
+ run_command_calls: []
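
For reference, the YAML spec above is consumed by Helper._make_test_cases() via yaml.safe_load, so the &env-def-true / *env-def-true anchors expand into identical environ dicts before each run_command entry is wrapped in a RunCmdCall. A minimal sketch of that loading step on a trimmed-down spec (standalone, no collection imports):

    import textwrap
    import yaml

    SPEC = textwrap.dedent("""
        - id: install_dancer
          input: {name: Dancer, mode: new}
          output: {changed: true}
          run_command_calls:
            - command: [/testbin/cpanm, Dancer]
              environ: {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true}
              rc: 0
              out: ""
              err: ""
    """)

    cases = yaml.safe_load(SPEC)
    assert cases[0]["id"] == "install_dancer"
    assert cases[0]["run_command_calls"][0]["environ"]["check_rc"] is True
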
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_dnf_config_manager.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_dnf_config_manager.py
new file mode 100644
index 000000000..90bffe436
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_dnf_config_manager.py
@@ -0,0 +1,402 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2023, Andrew Hyatt <andy@hyatt.xyz>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+from ansible_collections.community.general.tests.unit.compat.mock import patch, call
+from ansible_collections.community.general.plugins.modules import dnf_config_manager as dnf_config_manager_module
+from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleExitJson, AnsibleFailJson, \
+ ModuleTestCase, set_module_args
+
+# Return value on all-default arguments
+mock_repolist_crb_enabled = """Loaded plugins: builddep, changelog, config-manager, copr, debug, debuginfo-install
+DNF version: 4.14.0
+cachedir: /var/cache/dnf
+Last metadata expiration check: 1:20:49 ago on Fri 22 Dec 2023 06:05:13 PM UTC.
+Repo-id : appstream
+Repo-name : AlmaLinux 9 - AppStream
+Repo-status : enabled
+Repo-revision : 1703240474
+Repo-updated : Fri 22 Dec 2023 10:21:14 AM UTC
+Repo-pkgs : 5,897
+Repo-available-pkgs: 5,728
+Repo-size : 9.5 G
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/appstream
+Repo-baseurl : http://mirror.cogentco.com/pub/linux/almalinux/9.3/AppStream/x86_64/os/ (9 more)
+Repo-expire : 86,400 second(s) (last: Fri 22 Dec 2023 06:05:11 PM UTC)
+Repo-filename : /etc/yum.repos.d/almalinux-appstream.repo
+
+Repo-id : appstream-debuginfo
+Repo-name : AlmaLinux 9 - AppStream - Debug
+Repo-status : disabled
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/appstream-debug
+Repo-expire : 86,400 second(s) (last: unknown)
+Repo-filename : /etc/yum.repos.d/almalinux-appstream.repo
+
+Repo-id : appstream-source
+Repo-name : AlmaLinux 9 - AppStream - Source
+Repo-status : disabled
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/appstream-source
+Repo-expire : 86,400 second(s) (last: unknown)
+Repo-filename : /etc/yum.repos.d/almalinux-appstream.repo
+
+Repo-id : baseos
+Repo-name : AlmaLinux 9 - BaseOS
+Repo-status : enabled
+Repo-revision : 1703240561
+Repo-updated : Fri 22 Dec 2023 10:22:41 AM UTC
+Repo-pkgs : 1,244
+Repo-available-pkgs: 1,244
+Repo-size : 1.3 G
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/baseos
+Repo-baseurl : http://mirror.cogentco.com/pub/linux/almalinux/9.3/BaseOS/x86_64/os/ (9 more)
+Repo-expire : 86,400 second(s) (last: Fri 22 Dec 2023 06:05:11 PM UTC)
+Repo-filename : /etc/yum.repos.d/almalinux-baseos.repo
+
+Repo-id : baseos-debuginfo
+Repo-name : AlmaLinux 9 - BaseOS - Debug
+Repo-status : disabled
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/baseos-debug
+Repo-expire : 86,400 second(s) (last: unknown)
+Repo-filename : /etc/yum.repos.d/almalinux-baseos.repo
+
+Repo-id : baseos-source
+Repo-name : AlmaLinux 9 - BaseOS - Source
+Repo-status : disabled
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/baseos-source
+Repo-expire : 86,400 second(s) (last: unknown)
+Repo-filename : /etc/yum.repos.d/almalinux-baseos.repo
+
+Repo-id : copr:copr.fedorainfracloud.org:uriesk:dracut-crypt-ssh
+Repo-name : Copr repo for dracut-crypt-ssh owned by uriesk
+Repo-status : enabled
+Repo-revision : 1698291016
+Repo-updated : Thu 26 Oct 2023 03:30:16 AM UTC
+Repo-pkgs : 4
+Repo-available-pkgs: 4
+Repo-size : 102 k
+Repo-baseurl : https://download.copr.fedorainfracloud.org/results/uriesk/dracut-crypt-ssh/epel-9-x86_64/
+Repo-expire : 172,800 second(s) (last: Fri 22 Dec 2023 06:05:10 PM UTC)
+Repo-filename : /etc/yum.repos.d/_copr:copr.fedorainfracloud.org:uriesk:dracut-crypt-ssh.repo
+
+Repo-id : crb
+Repo-name : AlmaLinux 9 - CRB
+Repo-status : enabled
+Repo-revision : 1703240590
+Repo-updated : Fri 22 Dec 2023 10:23:10 AM UTC
+Repo-pkgs : 1,730
+Repo-available-pkgs: 1,727
+Repo-size : 13 G
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/crb
+Repo-baseurl : http://mirror.cogentco.com/pub/linux/almalinux/9.3/CRB/x86_64/os/ (9 more)
+Repo-expire : 86,400 second(s) (last: Fri 22 Dec 2023 06:05:11 PM UTC)
+Repo-filename : /etc/yum.repos.d/almalinux-crb.repo
+
+Repo-id : rpmfusion-nonfree-updates
+Repo-name : RPM Fusion for EL 9 - Nonfree - Updates
+Repo-status : enabled
+Repo-revision : 1703248251
+Repo-tags : binary-x86_64
+Repo-updated : Fri 22 Dec 2023 12:30:53 PM UTC
+Repo-pkgs : 65
+Repo-available-pkgs: 65
+Repo-size : 944 M
+Repo-metalink : http://mirrors.rpmfusion.org/metalink?repo=nonfree-el-updates-released-9&arch=x86_64
+ Updated : Fri 22 Dec 2023 06:05:13 PM UTC
+Repo-baseurl : http://uvermont.mm.fcix.net/rpmfusion/nonfree/el/updates/9/x86_64/ (33 more)
+Repo-expire : 172,800 second(s) (last: Fri 22 Dec 2023 06:05:13 PM UTC)
+Repo-filename : /etc/yum.repos.d/rpmfusion-nonfree-updates.repo
+Total packages: 28,170
+"""
+
+mock_repolist_crb_disabled = """Loaded plugins: builddep, changelog, config-manager, copr, debug, debuginfo-install
+DNF version: 4.14.0
+cachedir: /var/cache/dnf
+Last metadata expiration check: 1:20:49 ago on Fri 22 Dec 2023 06:05:13 PM UTC.
+Repo-id : appstream
+Repo-name : AlmaLinux 9 - AppStream
+Repo-status : enabled
+Repo-revision : 1703240474
+Repo-updated : Fri 22 Dec 2023 10:21:14 AM UTC
+Repo-pkgs : 5,897
+Repo-available-pkgs: 5,728
+Repo-size : 9.5 G
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/appstream
+Repo-baseurl : http://mirror.cogentco.com/pub/linux/almalinux/9.3/AppStream/x86_64/os/ (9 more)
+Repo-expire : 86,400 second(s) (last: Fri 22 Dec 2023 06:05:11 PM UTC)
+Repo-filename : /etc/yum.repos.d/almalinux-appstream.repo
+
+Repo-id : appstream-debuginfo
+Repo-name : AlmaLinux 9 - AppStream - Debug
+Repo-status : disabled
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/appstream-debug
+Repo-expire : 86,400 second(s) (last: unknown)
+Repo-filename : /etc/yum.repos.d/almalinux-appstream.repo
+
+Repo-id : appstream-source
+Repo-name : AlmaLinux 9 - AppStream - Source
+Repo-status : disabled
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/appstream-source
+Repo-expire : 86,400 second(s) (last: unknown)
+Repo-filename : /etc/yum.repos.d/almalinux-appstream.repo
+
+Repo-id : baseos
+Repo-name : AlmaLinux 9 - BaseOS
+Repo-status : enabled
+Repo-revision : 1703240561
+Repo-updated : Fri 22 Dec 2023 10:22:41 AM UTC
+Repo-pkgs : 1,244
+Repo-available-pkgs: 1,244
+Repo-size : 1.3 G
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/baseos
+Repo-baseurl : http://mirror.cogentco.com/pub/linux/almalinux/9.3/BaseOS/x86_64/os/ (9 more)
+Repo-expire : 86,400 second(s) (last: Fri 22 Dec 2023 06:05:11 PM UTC)
+Repo-filename : /etc/yum.repos.d/almalinux-baseos.repo
+
+Repo-id : baseos-debuginfo
+Repo-name : AlmaLinux 9 - BaseOS - Debug
+Repo-status : disabled
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/baseos-debug
+Repo-expire : 86,400 second(s) (last: unknown)
+Repo-filename : /etc/yum.repos.d/almalinux-baseos.repo
+
+Repo-id : baseos-source
+Repo-name : AlmaLinux 9 - BaseOS - Source
+Repo-status : disabled
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/baseos-source
+Repo-expire : 86,400 second(s) (last: unknown)
+Repo-filename : /etc/yum.repos.d/almalinux-baseos.repo
+
+Repo-id : copr:copr.fedorainfracloud.org:uriesk:dracut-crypt-ssh
+Repo-name : Copr repo for dracut-crypt-ssh owned by uriesk
+Repo-status : enabled
+Repo-revision : 1698291016
+Repo-updated : Thu 26 Oct 2023 03:30:16 AM UTC
+Repo-pkgs : 4
+Repo-available-pkgs: 4
+Repo-size : 102 k
+Repo-baseurl : https://download.copr.fedorainfracloud.org/results/uriesk/dracut-crypt-ssh/epel-9-x86_64/
+Repo-expire : 172,800 second(s) (last: Fri 22 Dec 2023 06:05:10 PM UTC)
+Repo-filename : /etc/yum.repos.d/_copr:copr.fedorainfracloud.org:uriesk:dracut-crypt-ssh.repo
+
+Repo-id : crb
+Repo-name : AlmaLinux 9 - CRB
+Repo-status : disabled
+Repo-revision : 1703240590
+Repo-updated : Fri 22 Dec 2023 10:23:10 AM UTC
+Repo-pkgs : 1,730
+Repo-available-pkgs: 1,727
+Repo-size : 13 G
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/crb
+Repo-baseurl : http://mirror.cogentco.com/pub/linux/almalinux/9.3/CRB/x86_64/os/ (9 more)
+Repo-expire : 86,400 second(s) (last: Fri 22 Dec 2023 06:05:11 PM UTC)
+Repo-filename : /etc/yum.repos.d/almalinux-crb.repo
+
+Repo-id : rpmfusion-nonfree-updates
+Repo-name : RPM Fusion for EL 9 - Nonfree - Updates
+Repo-status : enabled
+Repo-revision : 1703248251
+Repo-tags : binary-x86_64
+Repo-updated : Fri 22 Dec 2023 12:30:53 PM UTC
+Repo-pkgs : 65
+Repo-available-pkgs: 65
+Repo-size : 944 M
+Repo-metalink : http://mirrors.rpmfusion.org/metalink?repo=nonfree-el-updates-released-9&arch=x86_64
+ Updated : Fri 22 Dec 2023 06:05:13 PM UTC
+Repo-baseurl : http://uvermont.mm.fcix.net/rpmfusion/nonfree/el/updates/9/x86_64/ (33 more)
+Repo-expire : 172,800 second(s) (last: Fri 22 Dec 2023 06:05:13 PM UTC)
+Repo-filename : /etc/yum.repos.d/rpmfusion-nonfree-updates.repo
+Total packages: 28,170
+"""
+
+mock_repolist_no_status = """Repo-id : appstream-debuginfo
+Repo-name : AlmaLinux 9 - AppStream - Debug
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/appstream-debug
+Repo-expire : 86,400 second(s) (last: unknown)
+Repo-filename : /etc/yum.repos.d/almalinux-appstream.repo
+
+Repo-id : appstream-source
+Repo-name : AlmaLinux 9 - AppStream - Source
+Repo-status : disabled
+Repo-mirrors : https://mirrors.almalinux.org/mirrorlist/9/appstream-source
+Repo-expire : 86,400 second(s) (last: unknown)
+Repo-filename : /etc/yum.repos.d/almalinux-appstream.repo
+"""
+
+mock_repolist_status_before_id = """
+Repo-id : appstream-debuginfo
+Repo-status : disabled
+Repo-status : disabled
+"""
+
+expected_repo_states_crb_enabled = {'disabled': ['appstream-debuginfo',
+ 'appstream-source',
+ 'baseos-debuginfo',
+ 'baseos-source'],
+ 'enabled': ['appstream',
+ 'baseos',
+ 'copr:copr.fedorainfracloud.org:uriesk:dracut-crypt-ssh',
+ 'crb',
+ 'rpmfusion-nonfree-updates']}
+
+expected_repo_states_crb_disabled = {'disabled': ['appstream-debuginfo',
+ 'appstream-source',
+ 'baseos-debuginfo',
+ 'baseos-source',
+ 'crb'],
+ 'enabled': ['appstream',
+ 'baseos',
+ 'copr:copr.fedorainfracloud.org:uriesk:dracut-crypt-ssh',
+ 'rpmfusion-nonfree-updates']}
+
+call_get_repo_states = call(['/usr/bin/dnf', 'repolist', '--all', '--verbose'], check_rc=True)
+call_disable_crb = call(['/usr/bin/dnf', 'config-manager', '--set-disabled', 'crb'], check_rc=True)
+call_enable_crb = call(['/usr/bin/dnf', 'config-manager', '--set-enabled', 'crb'], check_rc=True)
+
+
+class TestDNFConfigManager(ModuleTestCase):
+ def setUp(self):
+ super(TestDNFConfigManager, self).setUp()
+ self.mock_run_command = (patch('ansible.module_utils.basic.AnsibleModule.run_command'))
+ self.run_command = self.mock_run_command.start()
+ self.mock_path_exists = (patch('os.path.exists'))
+ self.path_exists = self.mock_path_exists.start()
+ self.path_exists.return_value = True
+ self.module = dnf_config_manager_module
+
+ def tearDown(self):
+ super(TestDNFConfigManager, self).tearDown()
+ self.mock_run_command.stop()
+ self.mock_path_exists.stop()
+
+ def set_command_mock(self, execute_return=(0, '', ''), execute_side_effect=None):
+ self.run_command.reset_mock()
+ self.run_command.return_value = execute_return
+ self.run_command.side_effect = execute_side_effect
+
+ def execute_module(self, failed=False, changed=False):
+ if failed:
+ result = self.failed()
+ self.assertTrue(result['failed'])
+ else:
+ result = self.changed(changed)
+ self.assertEqual(result['changed'], changed)
+
+ return result
+
+ def failed(self):
+ with self.assertRaises(AnsibleFailJson) as exc:
+ self.module.main()
+
+ result = exc.exception.args[0]
+ self.assertTrue(result['failed'])
+ return result
+
+ def changed(self, changed=False):
+ with self.assertRaises(AnsibleExitJson) as exc:
+ self.module.main()
+
+ result = exc.exception.args[0]
+ self.assertEqual(result['changed'], changed)
+ return result
+
+ def test_get_repo_states(self):
+ set_module_args({})
+ self.set_command_mock(execute_return=(0, mock_repolist_crb_enabled, ''))
+ result = self.execute_module(changed=False)
+ self.assertEqual(result['repo_states_pre'], expected_repo_states_crb_enabled)
+ self.assertEqual(result['repo_states_post'], expected_repo_states_crb_enabled)
+ self.assertEqual(result['changed_repos'], [])
+ self.run_command.assert_has_calls(calls=[call_get_repo_states, call_get_repo_states], any_order=False)
+
+ def test_enable_disabled_repo(self):
+ set_module_args({
+ 'name': ['crb'],
+ 'state': 'enabled'
+ })
+ side_effects = [(0, mock_repolist_crb_disabled, ''), (0, '', ''), (0, mock_repolist_crb_enabled, '')]
+ self.set_command_mock(execute_side_effect=side_effects)
+ result = self.execute_module(changed=True)
+ self.assertEqual(result['repo_states_pre'], expected_repo_states_crb_disabled)
+ self.assertEqual(result['repo_states_post'], expected_repo_states_crb_enabled)
+ self.assertEqual(result['changed_repos'], ['crb'])
+ expected_calls = [call_get_repo_states, call_enable_crb, call_get_repo_states]
+ self.run_command.assert_has_calls(calls=expected_calls, any_order=False)
+
+ def test_enable_disabled_repo_check_mode(self):
+ set_module_args({
+ 'name': ['crb'],
+ 'state': 'enabled',
+ '_ansible_check_mode': True
+ })
+ side_effects = [(0, mock_repolist_crb_disabled, ''), (0, mock_repolist_crb_disabled, '')]
+ self.set_command_mock(execute_side_effect=side_effects)
+ result = self.execute_module(changed=True)
+ self.assertEqual(result['changed_repos'], ['crb'])
+ self.run_command.assert_has_calls(calls=[call_get_repo_states], any_order=False)
+
+ def test_disable_enabled_repo(self):
+ set_module_args({
+ 'name': ['crb'],
+ 'state': 'disabled'
+ })
+ side_effects = [(0, mock_repolist_crb_enabled, ''), (0, '', ''), (0, mock_repolist_crb_disabled, '')]
+ self.set_command_mock(execute_side_effect=side_effects)
+ result = self.execute_module(changed=True)
+ self.assertEqual(result['repo_states_pre'], expected_repo_states_crb_enabled)
+ self.assertEqual(result['repo_states_post'], expected_repo_states_crb_disabled)
+ self.assertEqual(result['changed_repos'], ['crb'])
+ expected_calls = [call_get_repo_states, call_disable_crb, call_get_repo_states]
+ self.run_command.assert_has_calls(calls=expected_calls, any_order=False)
+
+ def test_crb_already_enabled(self):
+ set_module_args({
+ 'name': ['crb'],
+ 'state': 'enabled'
+ })
+ side_effects = [(0, mock_repolist_crb_enabled, ''), (0, mock_repolist_crb_enabled, '')]
+ self.set_command_mock(execute_side_effect=side_effects)
+ result = self.execute_module(changed=False)
+ self.assertEqual(result['repo_states_pre'], expected_repo_states_crb_enabled)
+ self.assertEqual(result['repo_states_post'], expected_repo_states_crb_enabled)
+ self.assertEqual(result['changed_repos'], [])
+ self.run_command.assert_has_calls(calls=[call_get_repo_states, call_get_repo_states], any_order=False)
+
+ def test_get_repo_states_fail_no_status(self):
+ set_module_args({})
+ self.set_command_mock(execute_return=(0, mock_repolist_no_status, ''))
+ result = self.execute_module(failed=True)
+ self.assertEqual(result['msg'], 'dnf repolist parse failure: parsed another repo id before next status')
+ self.run_command.assert_has_calls(calls=[call_get_repo_states], any_order=False)
+
+ def test_get_repo_states_fail_status_before_id(self):
+ set_module_args({})
+ self.set_command_mock(execute_return=(0, mock_repolist_status_before_id, ''))
+ result = self.execute_module(failed=True)
+ self.assertEqual(result['msg'], 'dnf repolist parse failure: parsed status before repo id')
+ self.run_command.assert_has_calls(calls=[call_get_repo_states], any_order=False)
+
+ def test_failed__unknown_repo_id(self):
+ set_module_args({
+ 'name': ['fake']
+ })
+ self.set_command_mock(execute_return=(0, mock_repolist_crb_disabled, ''))
+ result = self.execute_module(failed=True)
+ self.assertEqual(result['msg'], "did not find repo with ID 'fake' in dnf repolist --all --verbose")
+ self.run_command.assert_has_calls(calls=[call_get_repo_states], any_order=False)
+
+ def test_failed_state_change_ineffective(self):
+ set_module_args({
+ 'name': ['crb'],
+ 'state': 'enabled'
+ })
+ side_effects = [(0, mock_repolist_crb_disabled, ''), (0, '', ''), (0, mock_repolist_crb_disabled, '')]
+ self.set_command_mock(execute_side_effect=side_effects)
+ result = self.execute_module(failed=True)
+ self.assertEqual(result['msg'], "dnf config-manager failed to make 'crb' enabled")
+ expected_calls = [call_get_repo_states, call_enable_crb, call_get_repo_states]
+ self.run_command.assert_has_calls(calls=expected_calls, any_order=False)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_dnsimple.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_dnsimple.py
index 95a78818d..d5578252d 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_dnsimple.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_dnsimple.py
@@ -45,7 +45,7 @@ class TestDNSimple(ModuleTestCase):
def test_account_token(self, mock_whoami):
mock_whoami.return_value.data.account = 42
ds = self.module.DNSimpleV2('fake', 'fake', True, self.module)
- self.assertEquals(ds.account, 42)
+ self.assertEqual(ds.account, 42)
@patch('dnsimple.service.Accounts.list_accounts')
@patch('dnsimple.service.Identity.whoami')
@@ -61,4 +61,4 @@ class TestDNSimple(ModuleTestCase):
mock_accounts.return_value.data = [42]
mock_whoami.return_value.data.account = None
ds = self.module.DNSimpleV2('fake', 'fake', True, self.module)
- self.assertEquals(ds.account, 42)
+ self.assertEqual(ds.account, 42)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_dnsimple_info.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_dnsimple_info.py
index 5806ec772..08c5296c8 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_dnsimple_info.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_dnsimple_info.py
@@ -89,7 +89,7 @@ class TestDNSimple_Info(ModuleTestCase):
result = exc_info.exception.args[0]
# nothing should change
self.assertFalse(result['changed'])
- # we should return at least one item with mathing domain
+ # we should return at least one item with matching domain
assert result['dnsimple_records_info'][0]['name'] == name
@with_httmock(records_resp)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_facter_facts.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_facter_facts.py
new file mode 100644
index 000000000..227d8cd15
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_facter_facts.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+from ansible_collections.community.general.plugins.modules import facter_facts
+from .helper import Helper
+
+
+Helper.from_module(facter_facts, __name__)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_facter_facts.yaml b/ansible_collections/community/general/tests/unit/plugins/modules/test_facter_facts.yaml
new file mode 100644
index 000000000..c287fdcfd
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_facter_facts.yaml
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+---
+- id: simple run
+ output:
+ ansible_facts:
+ facter:
+ a: 1
+ b: 2
+ c: 3
+ run_command_calls:
+ - command: [/testbin/facter, --json]
+ environ: &env-def {check_rc: true}
+ rc: 0
+ out: >
+ { "a": 1, "b": 2, "c": 3 }
+ err: ""
+- id: with args
+ input:
+ arguments:
+ - -p
+ - system_uptime
+ - timezone
+ - is_virtual
+ output:
+ ansible_facts:
+ facter:
+ a: 1
+ b: 2
+ c: 3
+ run_command_calls:
+ - command: [/testbin/facter, --json, -p, system_uptime, timezone, is_virtual]
+ environ: *env-def
+ rc: 0
+ out: >
+ { "a": 1, "b": 2, "c": 3 }
+ err: ""
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2.py
index f01f15ef8..9608016e5 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2.py
@@ -6,111 +6,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import json
from ansible_collections.community.general.plugins.modules import gconftool2
+from .helper import Helper
-import pytest
-TESTED_MODULE = gconftool2.__name__
-
-
-@pytest.fixture
-def patch_gconftool2(mocker):
- """
- Function used for mocking some parts of redhat_subscription module
- """
- mocker.patch('ansible_collections.community.general.plugins.module_utils.mh.module_helper.AnsibleModule.get_bin_path',
- return_value='/testbin/gconftool-2')
-
-
-TEST_CASES = [
- [
- {'state': 'get', 'key': '/desktop/gnome/background/picture_filename'},
- {
- 'id': 'test_simple_element_get',
- 'run_command.calls': [
- (
- ['/testbin/gconftool-2', '--get', '/desktop/gnome/background/picture_filename'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '100\n', '',),
- ),
- ],
- 'new_value': '100',
- }
- ],
- [
- {'state': 'get', 'key': '/desktop/gnome/background/picture_filename'},
- {
- 'id': 'test_simple_element_get_not_found',
- 'run_command.calls': [
- (
- ['/testbin/gconftool-2', '--get', '/desktop/gnome/background/picture_filename'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '', "No value set for `/desktop/gnome/background/picture_filename'\n",),
- ),
- ],
- 'new_value': None,
- }
- ],
- [
- {'state': 'present', 'key': '/desktop/gnome/background/picture_filename', 'value': '200', 'value_type': 'int'},
- {
- 'id': 'test_simple_element_set',
- 'run_command.calls': [
- (
- ['/testbin/gconftool-2', '--get', '/desktop/gnome/background/picture_filename'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '100\n', '',),
- ),
- (
- ['/testbin/gconftool-2', '--type', 'int', '--set', '/desktop/gnome/background/picture_filename', '200'],
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- (0, '200\n', '',),
- ),
- ],
- 'new_value': '200',
- }
- ],
-]
-TEST_CASES_IDS = [item[1]['id'] for item in TEST_CASES]
-
-
-@pytest.mark.parametrize('patch_ansible_module, testcase',
- TEST_CASES,
- ids=TEST_CASES_IDS,
- indirect=['patch_ansible_module'])
-@pytest.mark.usefixtures('patch_ansible_module')
-def test_gconftool2(mocker, capfd, patch_gconftool2, testcase):
- """
- Run unit tests for test cases listen in TEST_CASES
- """
-
- # Mock function used for running commands first
- call_results = [item[2] for item in testcase['run_command.calls']]
- mock_run_command = mocker.patch(
- 'ansible_collections.community.general.plugins.module_utils.mh.module_helper.AnsibleModule.run_command',
- side_effect=call_results)
-
- # Try to run test case
- with pytest.raises(SystemExit):
- gconftool2.main()
-
- out, err = capfd.readouterr()
- results = json.loads(out)
- print("testcase =\n%s" % testcase)
- print("results =\n%s" % results)
-
- for conditional_test_result in ('value',):
- if conditional_test_result in testcase:
- assert conditional_test_result in results, "'{0}' not found in {1}".format(conditional_test_result, results)
- assert results[conditional_test_result] == testcase[conditional_test_result], \
- "'{0}': '{1}' != '{2}'".format(conditional_test_result, results[conditional_test_result], testcase[conditional_test_result])
-
- assert mock_run_command.call_count == len(testcase['run_command.calls'])
- if mock_run_command.call_count:
- call_args_list = [(item[0][0], item[1]) for item in mock_run_command.call_args_list]
- expected_call_args_list = [(item[0], item[1]) for item in testcase['run_command.calls']]
- print("call args list =\n%s" % call_args_list)
- print("expected args list =\n%s" % expected_call_args_list)
- assert call_args_list == expected_call_args_list
+Helper.from_module(gconftool2, __name__)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2.yaml b/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2.yaml
new file mode 100644
index 000000000..5114dc45f
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2.yaml
@@ -0,0 +1,117 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+---
+- id: test_simple_element_set
+ input:
+ state: present
+ key: /desktop/gnome/background/picture_filename
+ value: 200
+ value_type: int
+ output:
+ new_value: '200'
+ changed: true
+ run_command_calls:
+ - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename]
+ environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true}
+ rc: 0
+ out: "100\n"
+ err: ""
+ - command: [/testbin/gconftool-2, --type, int, --set, /desktop/gnome/background/picture_filename, "200"]
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+ - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename]
+ environ: *env-def
+ rc: 0
+ out: "200\n"
+ err: ""
+- id: test_simple_element_set_idempotency_int
+ input:
+ state: present
+ key: /desktop/gnome/background/picture_filename
+ value: 200
+ value_type: int
+ output:
+ new_value: '200'
+ changed: false
+ run_command_calls:
+ - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename]
+ environ: *env-def
+ rc: 0
+ out: "200\n"
+ err: ""
+ - command: [/testbin/gconftool-2, --type, int, --set, /desktop/gnome/background/picture_filename, "200"]
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+ - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename]
+ environ: *env-def
+ rc: 0
+ out: "200\n"
+ err: ""
+- id: test_simple_element_set_idempotency_bool
+ input:
+ state: present
+ key: /apps/gnome_settings_daemon/screensaver/start_screensaver
+ value: false
+ value_type: bool
+ output:
+ new_value: 'false'
+ changed: false
+ run_command_calls:
+ - command: [/testbin/gconftool-2, --get, /apps/gnome_settings_daemon/screensaver/start_screensaver]
+ environ: *env-def
+ rc: 0
+ out: "false\n"
+ err: ""
+ - command: [/testbin/gconftool-2, --type, bool, --set, /apps/gnome_settings_daemon/screensaver/start_screensaver, "False"]
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+ - command: [/testbin/gconftool-2, --get, /apps/gnome_settings_daemon/screensaver/start_screensaver]
+ environ: *env-def
+ rc: 0
+ out: "false\n"
+ err: ""
+- id: test_simple_element_unset
+ input:
+ state: absent
+ key: /desktop/gnome/background/picture_filename
+ output:
+ new_value: null
+ changed: true
+ run_command_calls:
+ - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename]
+ environ: *env-def
+ rc: 0
+ out: "200\n"
+ err: ""
+ - command: [/testbin/gconftool-2, --unset, /desktop/gnome/background/picture_filename]
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+- id: test_simple_element_unset_idempotency
+ input:
+ state: absent
+ key: /apps/gnome_settings_daemon/screensaver/start_screensaver
+ output:
+ new_value: null
+ changed: false
+ run_command_calls:
+ - command: [/testbin/gconftool-2, --get, /apps/gnome_settings_daemon/screensaver/start_screensaver]
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+ - command: [/testbin/gconftool-2, --unset, /apps/gnome_settings_daemon/screensaver/start_screensaver]
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2_info.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2_info.py
index 352af6bb0..54676a12d 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2_info.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2_info.py
@@ -6,98 +6,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import json
from ansible_collections.community.general.plugins.modules import gconftool2_info
+from .helper import Helper
-import pytest
-TESTED_MODULE = gconftool2_info.__name__
-
-
-@pytest.fixture
-def patch_gconftool2_info(mocker):
- """
- Function used for mocking some parts of redhat_subscription module
- """
- mocker.patch('ansible_collections.community.general.plugins.module_utils.mh.module_helper.AnsibleModule.get_bin_path',
- return_value='/testbin/gconftool-2')
-
-
-TEST_CASES = [
- [
- {'key': '/desktop/gnome/background/picture_filename'},
- {
- 'id': 'test_simple_element_get',
- 'run_command.calls': [
- (
- # Calling of following command will be asserted
- ['/testbin/gconftool-2', '--get', '/desktop/gnome/background/picture_filename'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- # Mock of returned code, stdout and stderr
- (0, '100\n', '',),
- ),
- ],
- 'value': '100',
- }
- ],
- [
- {'key': '/desktop/gnome/background/picture_filename'},
- {
- 'id': 'test_simple_element_get_not_found',
- 'run_command.calls': [
- (
- # Calling of following command will be asserted
- ['/testbin/gconftool-2', '--get', '/desktop/gnome/background/picture_filename'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- # Mock of returned code, stdout and stderr
- (0, '', "No value set for `/desktop/gnome/background/picture_filename'\n",),
- ),
- ],
- 'value': None,
- }
- ],
-]
-TEST_CASES_IDS = [item[1]['id'] for item in TEST_CASES]
-
-
-@pytest.mark.parametrize('patch_ansible_module, testcase',
- TEST_CASES,
- ids=TEST_CASES_IDS,
- indirect=['patch_ansible_module'])
-@pytest.mark.usefixtures('patch_ansible_module')
-def test_gconftool2_info(mocker, capfd, patch_gconftool2_info, testcase):
- """
- Run unit tests for test cases listen in TEST_CASES
- """
-
- # Mock function used for running commands first
- call_results = [item[2] for item in testcase['run_command.calls']]
- mock_run_command = mocker.patch(
- 'ansible_collections.community.general.plugins.module_utils.mh.module_helper.AnsibleModule.run_command',
- side_effect=call_results)
-
- # Try to run test case
- with pytest.raises(SystemExit):
- gconftool2_info.main()
-
- out, err = capfd.readouterr()
- results = json.loads(out)
- print("testcase =\n%s" % testcase)
- print("results =\n%s" % results)
-
- for conditional_test_result in ('value',):
- if conditional_test_result in testcase:
- assert conditional_test_result in results, "'{0}' not found in {1}".format(conditional_test_result, results)
- assert results[conditional_test_result] == testcase[conditional_test_result], \
- "'{0}': '{1}' != '{2}'".format(conditional_test_result, results[conditional_test_result], testcase[conditional_test_result])
-
- assert mock_run_command.call_count == len(testcase['run_command.calls'])
- if mock_run_command.call_count:
- call_args_list = [(item[0][0], item[1]) for item in mock_run_command.call_args_list]
- expected_call_args_list = [(item[0], item[1]) for item in testcase['run_command.calls']]
- print("call args list =\n%s" % call_args_list)
- print("expected args list =\n%s" % expected_call_args_list)
- assert call_args_list == expected_call_args_list
+Helper.from_module(gconftool2_info, __name__)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2_info.yaml b/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2_info.yaml
new file mode 100644
index 000000000..eb8bef750
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_gconftool2_info.yaml
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+---
+- id: test_simple_element_get
+ input:
+ key: /desktop/gnome/background/picture_filename
+ output:
+ value: '100'
+ run_command_calls:
+ - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename]
+ environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true}
+ rc: 0
+ out: "100\n"
+ err: ""
+- id: test_simple_element_get_not_found
+ input:
+ key: /desktop/gnome/background/picture_filename
+ output:
+ value: null
+ run_command_calls:
+ - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename]
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: "No value set for `/desktop/gnome/background/picture_filename'\n"
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_gem.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_gem.py
index 92578e062..10c03e537 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_gem.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_gem.py
@@ -69,7 +69,7 @@ class TestGem(ModuleTestCase):
assert result['msg'] == "install_dir requires user_install=false"
def test_passes_install_dir_to_gem(self):
- # XXX: This test is extremely fragile, and makes assuptions about the module code, and how
+ # XXX: This test is extremely fragile, and makes assumptions about the module code, and how
# functions are run.
# If you start modifying the code of the module, you might need to modify what this
# test mocks. The only thing that matters is the assertion that this 'gem install' is
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_gio_mime.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_gio_mime.py
new file mode 100644
index 000000000..f2402ac35
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_gio_mime.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+from ansible_collections.community.general.plugins.modules import gio_mime
+from .helper import Helper
+
+
+Helper.from_module(gio_mime, __name__)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_gio_mime.yaml b/ansible_collections/community/general/tests/unit/plugins/modules/test_gio_mime.yaml
new file mode 100644
index 000000000..d9e47a60e
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_gio_mime.yaml
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+---
+- id: test_set_handler
+ input:
+ handler: google-chrome.desktop
+ mime_type: x-scheme-handler/http
+ output:
+ handler: google-chrome.desktop
+ changed: true
+ run_command_calls:
+ - command: [/testbin/gio, mime, x-scheme-handler/http]
+ environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true}
+ rc: 0
+ out: ""
+ err: >
+ No default applications for “x-scheme-handler/http”
+ - command: [/testbin/gio, mime, x-scheme-handler/http, google-chrome.desktop]
+ environ: *env-def
+ rc: 0
+ out: "Set google-chrome.desktop as the default for x-scheme-handler/http\n"
+ err: ""
+- id: test_set_handler_check
+ input:
+ handler: google-chrome.desktop
+ mime_type: x-scheme-handler/http
+ output:
+ handler: google-chrome.desktop
+ changed: true
+ flags:
+ skip: test helper does not support check mode yet
+ run_command_calls:
+ - command: [/testbin/gio, mime, x-scheme-handler/http]
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: >
+ No default applications for “x-scheme-handler/http”
+ - command: [/testbin/gio, mime, x-scheme-handler/http, google-chrome.desktop]
+ environ: *env-def
+ rc: 0
+ out: "Set google-chrome.desktop as the default for x-scheme-handler/http\n"
+ err: ""
+- id: test_set_handler_idempot
+ input:
+ handler: google-chrome.desktop
+ mime_type: x-scheme-handler/http
+ output:
+ handler: google-chrome.desktop
+ changed: false
+ run_command_calls:
+ - command: [/testbin/gio, mime, x-scheme-handler/http]
+ environ: *env-def
+ rc: 0
+ out: |
+ Default application for “x-scheme-handler/https”: google-chrome.desktop
+ Registered applications:
+ brave-browser.desktop
+ firefox.desktop
+ google-chrome.desktop
+ firefox_firefox.desktop
+ Recommended applications:
+ brave-browser.desktop
+ firefox.desktop
+ google-chrome.desktop
+ firefox_firefox.desktop
+ err: ""
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_gitlab_group_access_token.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_gitlab_group_access_token.py
new file mode 100644
index 000000000..06af94820
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_gitlab_group_access_token.py
@@ -0,0 +1,107 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2023, Zoran Krleza (zoran.krleza@true-north.hr)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import gitlab
+
+from ansible_collections.community.general.plugins.modules.gitlab_group_access_token import GitLabGroupAccessToken
+
+# python-gitlab 3.1+ is needed for python-gitlab access tokens api
+PYTHON_GITLAB_MINIMAL_VERSION = (3, 1)
+
+
+def python_gitlab_version_match_requirement():
+ return tuple(map(int, gitlab.__version__.split('.'))) >= PYTHON_GITLAB_MINIMAL_VERSION
+
+
+def _dummy(x):
+ """Dummy function. Only used as a placeholder for toplevel definitions when the test is going
+ to be skipped anyway"""
+ return x
+
+
+pytestmark = []
+try:
+ from .gitlab import (GitlabModuleTestCase,
+ resp_get_user,
+ resp_get_group,
+ resp_list_group_access_tokens,
+ resp_create_group_access_tokens,
+ resp_revoke_group_access_tokens)
+
+except ImportError:
+ pytestmark.append(pytest.mark.skip("Could not load gitlab module required for testing"))
+ # Need to set these to something so that we don't fail when parsing
+ GitlabModuleTestCase = object
+ resp_list_group_access_tokens = _dummy
+ resp_create_group_access_tokens = _dummy
+ resp_revoke_group_access_tokens = _dummy
+ resp_get_user = _dummy
+ resp_get_group = _dummy
+
+# Unit tests requirements
+try:
+ from httmock import with_httmock # noqa
+except ImportError:
+ pytestmark.append(pytest.mark.skip("Could not load httmock module required for testing"))
+ with_httmock = _dummy
+
+
+class TestGitlabGroupAccessToken(GitlabModuleTestCase):
+ @with_httmock(resp_get_user)
+ def setUp(self):
+ super(TestGitlabGroupAccessToken, self).setUp()
+ if not python_gitlab_version_match_requirement():
+ self.skipTest("python-gitlab %s+ is needed for gitlab_group_access_token" % ",".join(map(str, PYTHON_GITLAB_MINIMAL_VERSION)))
+
+ self.moduleUtil = GitLabGroupAccessToken(module=self.mock_module, gitlab_instance=self.gitlab_instance)
+
+ @with_httmock(resp_get_group)
+ @with_httmock(resp_list_group_access_tokens)
+ def test_find_access_token(self):
+ group = self.gitlab_instance.groups.get(1)
+ self.assertIsNotNone(group)
+
+ rvalue = self.moduleUtil.find_access_token(group, "token1")
+ self.assertEqual(rvalue, False)
+ self.assertIsNotNone(self.moduleUtil.access_token_object)
+
+ @with_httmock(resp_get_group)
+ @with_httmock(resp_list_group_access_tokens)
+ def test_find_access_token_negative(self):
+ groups = self.gitlab_instance.groups.get(1)
+ self.assertIsNotNone(groups)
+
+ rvalue = self.moduleUtil.find_access_token(groups, "nonexisting")
+ self.assertEqual(rvalue, False)
+ self.assertIsNone(self.moduleUtil.access_token_object)
+
+ @with_httmock(resp_get_group)
+ @with_httmock(resp_create_group_access_tokens)
+ def test_create_access_token(self):
+ groups = self.gitlab_instance.groups.get(1)
+ self.assertIsNotNone(groups)
+
+ rvalue = self.moduleUtil.create_access_token(groups, {'name': "tokenXYZ", 'scopes': ["api"], 'access_level': 20, 'expires_at': "2024-12-31"})
+ self.assertEqual(rvalue, True)
+ self.assertIsNotNone(self.moduleUtil.access_token_object)
+
+ @with_httmock(resp_get_group)
+ @with_httmock(resp_list_group_access_tokens)
+ @with_httmock(resp_revoke_group_access_tokens)
+ def test_revoke_access_token(self):
+ groups = self.gitlab_instance.groups.get(1)
+ self.assertIsNotNone(groups)
+
+ rvalue = self.moduleUtil.find_access_token(groups, "token1")
+ self.assertEqual(rvalue, False)
+ self.assertIsNotNone(self.moduleUtil.access_token_object)
+
+ rvalue = self.moduleUtil.revoke_access_token()
+ self.assertEqual(rvalue, True)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_gitlab_project_access_token.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_gitlab_project_access_token.py
new file mode 100644
index 000000000..ebc324b88
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_gitlab_project_access_token.py
@@ -0,0 +1,107 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2023, Zoran Krleza (zoran.krleza@true-north.hr)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import gitlab
+
+from ansible_collections.community.general.plugins.modules.gitlab_project_access_token import GitLabProjectAccessToken
+
+# python-gitlab 3.1+ is needed for python-gitlab access tokens api
+PYTHON_GITLAB_MINIMAL_VERSION = (3, 1)
+
+
+def python_gitlab_version_match_requirement():
+ return tuple(map(int, gitlab.__version__.split('.'))) >= PYTHON_GITLAB_MINIMAL_VERSION
+
+
+def _dummy(x):
+ """Dummy function. Only used as a placeholder for toplevel definitions when the test is going
+ to be skipped anyway"""
+ return x
+
+
+pytestmark = []
+try:
+ from .gitlab import (GitlabModuleTestCase,
+ resp_get_user,
+ resp_get_project,
+ resp_list_project_access_tokens,
+ resp_create_project_access_tokens,
+ resp_revoke_project_access_tokens)
+
+except ImportError:
+ pytestmark.append(pytest.mark.skip("Could not load gitlab module required for testing"))
+ # Need to set these to something so that we don't fail when parsing
+ GitlabModuleTestCase = object
+ resp_list_project_access_tokens = _dummy
+ resp_create_project_access_tokens = _dummy
+ resp_revoke_project_access_tokens = _dummy
+ resp_get_user = _dummy
+ resp_get_project = _dummy
+
+# Unit tests requirements
+try:
+ from httmock import with_httmock # noqa
+except ImportError:
+ pytestmark.append(pytest.mark.skip("Could not load httmock module required for testing"))
+ with_httmock = _dummy
+
+
+class TestGitlabProjectAccessToken(GitlabModuleTestCase):
+ @with_httmock(resp_get_user)
+ def setUp(self):
+ super(TestGitlabProjectAccessToken, self).setUp()
+ if not python_gitlab_version_match_requirement():
+ self.skipTest("python-gitlab %s+ is needed for gitlab_project_access_token" % ",".join(map(str, PYTHON_GITLAB_MINIMAL_VERSION)))
+
+ self.moduleUtil = GitLabProjectAccessToken(module=self.mock_module, gitlab_instance=self.gitlab_instance)
+
+ @with_httmock(resp_get_project)
+ @with_httmock(resp_list_project_access_tokens)
+ def test_find_access_token(self):
+ project = self.gitlab_instance.projects.get(1)
+ self.assertIsNotNone(project)
+
+ rvalue = self.moduleUtil.find_access_token(project, "token1")
+ self.assertEqual(rvalue, False)
+ self.assertIsNotNone(self.moduleUtil.access_token_object)
+
+ @with_httmock(resp_get_project)
+ @with_httmock(resp_list_project_access_tokens)
+ def test_find_access_token_negative(self):
+ project = self.gitlab_instance.projects.get(1)
+ self.assertIsNotNone(project)
+
+ rvalue = self.moduleUtil.find_access_token(project, "nonexisting")
+ self.assertEqual(rvalue, False)
+ self.assertIsNone(self.moduleUtil.access_token_object)
+
+ @with_httmock(resp_get_project)
+ @with_httmock(resp_create_project_access_tokens)
+ def test_create_access_token(self):
+ project = self.gitlab_instance.projects.get(1)
+ self.assertIsNotNone(project)
+
+ rvalue = self.moduleUtil.create_access_token(project, {'name': "tokenXYZ", 'scopes': ["api"], 'access_level': 20, 'expires_at': "2024-12-31"})
+ self.assertEqual(rvalue, True)
+ self.assertIsNotNone(self.moduleUtil.access_token_object)
+
+ @with_httmock(resp_get_project)
+ @with_httmock(resp_list_project_access_tokens)
+ @with_httmock(resp_revoke_project_access_tokens)
+ def test_revoke_access_token(self):
+ project = self.gitlab_instance.projects.get(1)
+ self.assertIsNotNone(project)
+
+ rvalue = self.moduleUtil.find_access_token(project, "token1")
+ self.assertEqual(rvalue, False)
+ self.assertIsNotNone(self.moduleUtil.access_token_object)
+
+ rvalue = self.moduleUtil.revoke_access_token()
+ self.assertEqual(rvalue, True)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_hana_query.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_hana_query.py
deleted file mode 100644
index db06e4cef..000000000
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_hana_query.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright (c) 2021, Rainer Leber (@rainerleber) <rainerleber@gmail.com>
-# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
-# SPDX-License-Identifier: GPL-3.0-or-later
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-from ansible_collections.community.general.plugins.modules import hana_query
-from ansible_collections.community.general.tests.unit.plugins.modules.utils import (
- AnsibleExitJson,
- AnsibleFailJson,
- ModuleTestCase,
- set_module_args,
-)
-from ansible_collections.community.general.tests.unit.compat.mock import patch
-from ansible.module_utils import basic
-
-
-def get_bin_path(*args, **kwargs):
- """Function to return path of hdbsql"""
- return "/usr/sap/HDB/HDB01/exe/hdbsql"
-
-
-class Testhana_query(ModuleTestCase):
- """Main class for testing hana_query module."""
-
- def setUp(self):
- """Setup."""
- super(Testhana_query, self).setUp()
- self.module = hana_query
- self.mock_get_bin_path = patch.object(basic.AnsibleModule, 'get_bin_path', get_bin_path)
- self.mock_get_bin_path.start()
- self.addCleanup(self.mock_get_bin_path.stop) # ensure that the patching is 'undone'
-
- def tearDown(self):
- """Teardown."""
- super(Testhana_query, self).tearDown()
-
- def test_without_required_parameters(self):
- """Failure must occurs when all parameters are missing."""
- with self.assertRaises(AnsibleFailJson):
- set_module_args({})
- self.module.main()
-
- def test_hana_query(self):
- """Check that result is processed."""
- set_module_args({
- 'sid': "HDB",
- 'instance': "01",
- 'encrypted': False,
- 'host': "localhost",
- 'user': "SYSTEM",
- 'password': "1234Qwer",
- 'database': "HDB",
- 'query': "SELECT * FROM users;"
- })
- with patch.object(basic.AnsibleModule, 'run_command') as run_command:
- run_command.return_value = 0, 'username,name\n testuser,test user \n myuser, my user \n', ''
- with self.assertRaises(AnsibleExitJson) as result:
- hana_query.main()
- self.assertEqual(result.exception.args[0]['query_result'], [[
- {'username': 'testuser', 'name': 'test user'},
- {'username': 'myuser', 'name': 'my user'},
- ]])
- self.assertEqual(run_command.call_count, 1)
-
- def test_hana_userstore_query(self):
- """Check that result is processed with userstore."""
- set_module_args({
- 'sid': "HDB",
- 'instance': "01",
- 'encrypted': False,
- 'host': "localhost",
- 'user': "SYSTEM",
- 'userstore': True,
- 'database': "HDB",
- 'query': "SELECT * FROM users;"
- })
- with patch.object(basic.AnsibleModule, 'run_command') as run_command:
- run_command.return_value = 0, 'username,name\n testuser,test user \n myuser, my user \n', ''
- with self.assertRaises(AnsibleExitJson) as result:
- hana_query.main()
- self.assertEqual(result.exception.args[0]['query_result'], [[
- {'username': 'testuser', 'name': 'test user'},
- {'username': 'myuser', 'name': 'my user'},
- ]])
- self.assertEqual(run_command.call_count, 1)
-
- def test_hana_failed_no_passwd(self):
- """Check that result is failed with no password."""
- with self.assertRaises(AnsibleFailJson):
- set_module_args({
- 'sid': "HDB",
- 'instance': "01",
- 'encrypted': False,
- 'host': "localhost",
- 'user': "SYSTEM",
- 'database': "HDB",
- 'query': "SELECT * FROM users;"
- })
- self.module.main()
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_ini_file.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_ini_file.py
new file mode 100644
index 000000000..a65a9c326
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_ini_file.py
@@ -0,0 +1,51 @@
+# Copyright (c) 2023 Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
+# https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible_collections.community.general.plugins.modules import ini_file
+
+
+def do_test(option, ignore_spaces, newline, before, expected_after,
+ expected_changed, expected_msg):
+ section_lines = [before]
+ changed_lines = [0]
+ changed, msg = ini_file.update_section_line(
+ option, None, section_lines, 0, changed_lines, ignore_spaces,
+ newline, None)
+ assert section_lines[0] == expected_after
+ assert changed == expected_changed
+ assert changed_lines[0] == 1
+ assert msg == expected_msg
+
+
+def test_ignore_spaces_comment():
+ oldline = ';foobar=baz'
+ newline = 'foobar = baz'
+ do_test('foobar', True, newline, oldline, newline, True, 'option changed')
+
+
+def test_ignore_spaces_changed():
+ oldline = 'foobar=baz'
+ newline = 'foobar = freeble'
+ do_test('foobar', True, newline, oldline, newline, True, 'option changed')
+
+
+def test_ignore_spaces_unchanged():
+ oldline = 'foobar=baz'
+ newline = 'foobar = baz'
+ do_test('foobar', True, newline, oldline, oldline, False, None)
+
+
+def test_no_ignore_spaces_changed():
+ oldline = 'foobar=baz'
+ newline = 'foobar = baz'
+ do_test('foobar', False, newline, oldline, newline, True, 'option changed')
+
+
+def test_no_ignore_spaces_unchanged():
+ newline = 'foobar=baz'
+ do_test('foobar', False, newline, newline, newline, False, None)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_ipa_otptoken.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_ipa_otptoken.py
index c06e19c3b..23911e5a5 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_ipa_otptoken.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_ipa_otptoken.py
@@ -104,7 +104,7 @@ class TestIPAOTPToken(ModuleTestCase):
{
'method': 'otptoken_add',
'name': 'NewToken1',
- 'item': {'ipatokendisabled': 'FALSE',
+ 'item': {'ipatokendisabled': False,
'all': True}
}
)
@@ -130,7 +130,7 @@ class TestIPAOTPToken(ModuleTestCase):
{
'method': 'otptoken_add',
'name': 'NewToken1',
- 'item': {'ipatokendisabled': 'FALSE',
+ 'item': {'ipatokendisabled': False,
'all': True}
}
)
@@ -176,7 +176,7 @@ class TestIPAOTPToken(ModuleTestCase):
'ipatokenotpkey': 'KRSXG5CTMVRXEZLUGE======',
'description': 'Test description',
'ipatokenowner': 'pinky',
- 'ipatokendisabled': 'FALSE',
+ 'ipatokendisabled': False,
'ipatokennotbefore': '20200101010101Z',
'ipatokennotafter': '20900101010101Z',
'ipatokenvendor': 'Acme',
@@ -220,7 +220,7 @@ class TestIPAOTPToken(ModuleTestCase):
'ipatokenotpkey': [{'__base64__': 'VGVzdFNlY3JldDE='}],
'description': ['Test description'],
'ipatokenowner': ['pinky'],
- 'ipatokendisabled': ['FALSE'],
+ 'ipatokendisabled': [False],
'ipatokennotbefore': ['20200101010101Z'],
'ipatokennotafter': ['20900101010101Z'],
'ipatokenvendor': ['Acme'],
@@ -271,7 +271,7 @@ class TestIPAOTPToken(ModuleTestCase):
'ipatokenotpkey': [{'__base64__': 'VGVzdFNlY3JldDE='}],
'description': ['Test description'],
'ipatokenowner': ['pinky'],
- 'ipatokendisabled': ['FALSE'],
+ 'ipatokendisabled': [False],
'ipatokennotbefore': ['20200101010101Z'],
'ipatokennotafter': ['20900101010101Z'],
'ipatokenvendor': ['Acme'],
@@ -296,7 +296,7 @@ class TestIPAOTPToken(ModuleTestCase):
'name': 'NewToken1',
'item': {'description': 'Test description',
'ipatokenowner': 'brain',
- 'ipatokendisabled': 'FALSE',
+ 'ipatokendisabled': False,
'ipatokennotbefore': '20200101010101Z',
'ipatokennotafter': '20900101010101Z',
'ipatokenvendor': 'Acme',
@@ -335,7 +335,7 @@ class TestIPAOTPToken(ModuleTestCase):
'ipatokenotpkey': [{'__base64__': 'VGVzdFNlY3JldDE='}],
'description': ['Test description'],
'ipatokenowner': ['pinky'],
- 'ipatokendisabled': ['FALSE'],
+ 'ipatokendisabled': [False],
'ipatokennotbefore': ['20200101010101Z'],
'ipatokennotafter': ['20900101010101Z'],
'ipatokenvendor': ['Acme'],
@@ -360,7 +360,7 @@ class TestIPAOTPToken(ModuleTestCase):
'name': 'NewToken1',
'item': {'description': 'New Test description',
'ipatokenowner': 'pinky',
- 'ipatokendisabled': 'TRUE',
+ 'ipatokendisabled': True,
'ipatokennotbefore': '20200101010102Z',
'ipatokennotafter': '20900101010102Z',
'ipatokenvendor': 'NewAcme',
@@ -384,7 +384,7 @@ class TestIPAOTPToken(ModuleTestCase):
'ipatokenotpkey': [{'__base64__': 'KRSXG5CTMVRXEZLUGE======'}],
'description': ['Test description'],
'ipatokenowner': ['pinky'],
- 'ipatokendisabled': ['FALSE'],
+ 'ipatokendisabled': [False],
'ipatokennotbefore': ['20200101010101Z'],
'ipatokennotafter': ['20900101010101Z'],
'ipatokenvendor': ['Acme'],
@@ -425,7 +425,7 @@ class TestIPAOTPToken(ModuleTestCase):
'ipatokenotpkey': [{'__base64__': 'KRSXG5CTMVRXEZLUGE======'}],
'description': ['Test description'],
'ipatokenowner': ['pinky'],
- 'ipatokendisabled': ['FALSE'],
+ 'ipatokendisabled': [False],
'ipatokennotbefore': ['20200101010101Z'],
'ipatokennotafter': ['20900101010101Z'],
'ipatokenvendor': ['Acme'],
@@ -448,7 +448,7 @@ class TestIPAOTPToken(ModuleTestCase):
{
'method': 'otptoken_mod',
'name': 'NewToken1',
- 'item': {'ipatokendisabled': 'TRUE',
+ 'item': {'ipatokendisabled': True,
'all': True}
}
)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_ipa_pwpolicy.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_ipa_pwpolicy.py
index b45c566fc..538f61e9a 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_ipa_pwpolicy.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_ipa_pwpolicy.py
@@ -100,7 +100,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'minlength': '16',
'maxfailcount': '6',
'failinterval': '60',
- 'lockouttime': '600'
+ 'lockouttime': '600',
+ 'gracelimit': 3,
+ 'maxrepeat': 3,
+ 'maxsequence': 3,
+ 'dictcheck': True,
+ 'usercheck': True,
}
return_value = {}
mock_calls = (
@@ -124,7 +129,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'krbpwdminlength': '16',
'krbpwdmaxfailure': '6',
'krbpwdfailurecountinterval': '60',
- 'krbpwdlockoutduration': '600'
+ 'krbpwdlockoutduration': '600',
+ 'passwordgracelimit': '3',
+ 'ipapwdmaxrepeat': '3',
+ 'ipapwdmaxsequence': '3',
+ 'ipapwddictcheck': True,
+ 'ipapwdusercheck': True,
}
}
)
@@ -145,7 +155,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'minlength': '16',
'maxfailcount': '6',
'failinterval': '60',
- 'lockouttime': '600'
+ 'lockouttime': '600',
+ 'gracelimit': 3,
+ 'maxrepeat': 3,
+ 'maxsequence': 3,
+ 'dictcheck': True,
+ 'usercheck': True,
}
return_value = {}
mock_calls = (
@@ -169,7 +184,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'krbpwdminlength': '16',
'krbpwdmaxfailure': '6',
'krbpwdfailurecountinterval': '60',
- 'krbpwdlockoutduration': '600'
+ 'krbpwdlockoutduration': '600',
+ 'passwordgracelimit': '3',
+ 'ipapwdmaxrepeat': '3',
+ 'ipapwdmaxsequence': '3',
+ 'ipapwddictcheck': True,
+ 'ipapwdusercheck': True,
}
}
)
@@ -190,7 +210,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'minlength': '12',
'maxfailcount': '8',
'failinterval': '60',
- 'lockouttime': '600'
+ 'lockouttime': '600',
+ 'gracelimit': 3,
+ 'maxrepeat': 3,
+ 'maxsequence': 3,
+ 'dictcheck': True,
+ 'usercheck': True,
}
return_value = {
'cn': ['sysops'],
@@ -203,6 +228,11 @@ class TestIPAPwPolicy(ModuleTestCase):
'krbpwdmaxfailure': ['6'],
'krbpwdfailurecountinterval': ['60'],
'krbpwdlockoutduration': ['600'],
+ 'passwordgracelimit': ['3'],
+ 'ipapwdmaxrepeat': ['3'],
+ 'ipapwdmaxsequence': ['3'],
+ 'ipapwddictcheck': [True],
+ 'ipapwdusercheck': [True],
'dn': 'cn=sysops,cn=EXAMPLE.COM,cn=kerberos,dc=example,dc=com',
'objectclass': ['top', 'nscontainer', 'krbpwdpolicy']
}
@@ -227,7 +257,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'krbpwdminlength': '12',
'krbpwdmaxfailure': '8',
'krbpwdfailurecountinterval': '60',
- 'krbpwdlockoutduration': '600'
+ 'krbpwdlockoutduration': '600',
+ 'passwordgracelimit': '3',
+ 'ipapwdmaxrepeat': '3',
+ 'ipapwdmaxsequence': '3',
+ 'ipapwddictcheck': True,
+ 'ipapwdusercheck': True,
}
}
)
@@ -248,7 +283,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'minlength': '16',
'maxfailcount': '6',
'failinterval': '60',
- 'lockouttime': '600'
+ 'lockouttime': '600',
+ 'gracelimit': 3,
+ 'maxrepeat': 3,
+ 'maxsequence': 3,
+ 'dictcheck': True,
+ 'usercheck': True,
}
return_value = {
'cn': ['sysops'],
@@ -281,7 +321,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'krbpwdminlength': '16',
'krbpwdmaxfailure': '6',
'krbpwdfailurecountinterval': '60',
- 'krbpwdlockoutduration': '600'
+ 'krbpwdlockoutduration': '600',
+ 'passwordgracelimit': '3',
+ 'ipapwdmaxrepeat': '3',
+ 'ipapwdmaxsequence': '3',
+ 'ipapwddictcheck': True,
+ 'ipapwdusercheck': True,
}
}
)
@@ -342,7 +387,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'minlength': '16',
'maxfailcount': '6',
'failinterval': '60',
- 'lockouttime': '600'
+ 'lockouttime': '600',
+ 'gracelimit': 3,
+ 'maxrepeat': 3,
+ 'maxsequence': 3,
+ 'dictcheck': True,
+ 'usercheck': True,
}
return_value = {
'cn': ['admins'],
@@ -355,6 +405,11 @@ class TestIPAPwPolicy(ModuleTestCase):
'krbpwdmaxfailure': ['6'],
'krbpwdfailurecountinterval': ['60'],
'krbpwdlockoutduration': ['600'],
+ 'passwordgracelimit': ['3'],
+ 'ipapwdmaxrepeat': ['3'],
+ 'ipapwdmaxsequence': ['3'],
+ 'ipapwddictcheck': [True],
+ 'ipapwdusercheck': [True],
'dn': 'cn=admins,cn=EXAMPLE.COM,cn=kerberos,dc=example,dc=com',
'objectclass': ['top', 'nscontainer', 'krbpwdpolicy']
}
@@ -409,7 +464,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'minlength': '12',
'maxfailcount': '8',
'failinterval': '60',
- 'lockouttime': '600'
+ 'lockouttime': '600',
+ 'gracelimit': 3,
+ 'maxrepeat': 3,
+ 'maxsequence': 3,
+ 'dictcheck': True,
+ 'usercheck': True,
}
return_value = {
'cn': ['global_policy'],
@@ -420,6 +480,11 @@ class TestIPAPwPolicy(ModuleTestCase):
'krbpwdmaxfailure': ['6'],
'krbpwdfailurecountinterval': ['60'],
'krbpwdlockoutduration': ['600'],
+ 'passwordgracelimit': ['3'],
+ 'ipapwdmaxrepeat': ['3'],
+ 'ipapwdmaxsequence': ['3'],
+ 'ipapwddictcheck': [True],
+ 'ipapwdusercheck': [True],
'dn': 'cn=global_policy,cn=EXAMPLE.COM,cn=kerberos,dc=example,dc=com',
'objectclass': ['top', 'nscontainer', 'krbpwdpolicy']
}
@@ -443,7 +508,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'krbpwdminlength': '12',
'krbpwdmaxfailure': '8',
'krbpwdfailurecountinterval': '60',
- 'krbpwdlockoutduration': '600'
+ 'krbpwdlockoutduration': '600',
+ 'passwordgracelimit': '3',
+ 'ipapwdmaxrepeat': '3',
+ 'ipapwdmaxsequence': '3',
+ 'ipapwddictcheck': True,
+ 'ipapwdusercheck': True,
}
}
)
@@ -461,7 +531,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'minlength': '16',
'maxfailcount': '6',
'failinterval': '60',
- 'lockouttime': '600'
+ 'lockouttime': '600',
+ 'gracelimit': 3,
+ 'maxrepeat': 3,
+ 'maxsequence': 3,
+ 'dictcheck': True,
+ 'usercheck': True,
}
return_value = {
'cn': ['global_policy'],
@@ -473,6 +548,11 @@ class TestIPAPwPolicy(ModuleTestCase):
'krbpwdmaxfailure': ['6'],
'krbpwdfailurecountinterval': ['60'],
'krbpwdlockoutduration': ['600'],
+ 'passwordgracelimit': ['3'],
+ 'ipapwdmaxrepeat': ['3'],
+ 'ipapwdmaxsequence': ['3'],
+ 'ipapwddictcheck': [True],
+ 'ipapwdusercheck': [True],
'dn': 'cn=global_policy,cn=EXAMPLE.COM,cn=kerberos,dc=example,dc=com',
'objectclass': ['top', 'nscontainer', 'krbpwdpolicy']
}
@@ -504,7 +584,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'minlength': '16',
'maxfailcount': '6',
'failinterval': '60',
- 'lockouttime': '600'
+ 'lockouttime': '600',
+ 'gracelimit': 3,
+ 'maxrepeat': 3,
+ 'maxsequence': 3,
+ 'dictcheck': True,
+ 'usercheck': True,
}
return_value = {}
mock_calls = [
@@ -535,7 +620,12 @@ class TestIPAPwPolicy(ModuleTestCase):
'minlength': '12',
'maxfailcount': '8',
'failinterval': '60',
- 'lockouttime': '600'
+ 'lockouttime': '600',
+ 'gracelimit': 3,
+ 'maxrepeat': 3,
+ 'maxsequence': 3,
+ 'dictcheck': True,
+ 'usercheck': True,
}
return_value = {
'cn': ['sysops'],
@@ -548,6 +638,11 @@ class TestIPAPwPolicy(ModuleTestCase):
'krbpwdmaxfailure': ['6'],
'krbpwdfailurecountinterval': ['60'],
'krbpwdlockoutduration': ['600'],
+ 'passwordgracelimit': ['3'],
+ 'ipapwdmaxrepeat': ['3'],
+ 'ipapwdmaxsequence': ['3'],
+ 'ipapwddictcheck': [True],
+ 'ipapwdusercheck': [True],
'dn': 'cn=sysops,cn=EXAMPLE.COM,cn=kerberos,dc=example,dc=com',
'objectclass': ['top', 'nscontainer', 'krbpwdpolicy']
}
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_ipbase.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_ipbase.py
new file mode 100644
index 000000000..8106889da
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_ipbase.py
@@ -0,0 +1,187 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Ansible project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import json
+
+from ansible_collections.community.general.plugins.modules.ipbase_info import IpbaseInfo
+from ansible_collections.community.general.tests.unit.compat import unittest
+from ansible_collections.community.general.tests.unit.compat.mock import Mock
+
+
+IPBASE_DATA = {
+ "response": b"""
+{
+ "data": {
+ "ip": "1.1.1.1",
+ "hostname": "one.one.one.one",
+ "type": "v4",
+ "range_type": {
+ "type": "PUBLIC",
+ "description": "Public address"
+ },
+ "connection": {
+ "asn": 13335,
+ "organization": "Cloudflare, Inc.",
+ "isp": "APNIC Research and Development",
+ "range": "1.1.1.1/32"
+ },
+ "location": {
+ "geonames_id": 5332870,
+ "latitude": 34.053611755371094,
+ "longitude": -118.24549865722656,
+ "zip": "90012",
+ "continent": {
+ "code": "NA",
+ "name": "North America",
+ "name_translated": "North America"
+ },
+ "country": {
+ "alpha2": "US",
+ "alpha3": "USA",
+ "calling_codes": [
+ "+1"
+ ],
+ "currencies": [
+ {
+ "symbol": "$",
+ "name": "US Dollar",
+ "symbol_native": "$",
+ "decimal_digits": 2,
+ "rounding": 0,
+ "code": "USD",
+ "name_plural": "US dollars"
+ }
+ ],
+ "emoji": "...",
+ "ioc": "USA",
+ "languages": [
+ {
+ "name": "English",
+ "name_native": "English"
+ }
+ ],
+ "name": "United States",
+ "name_translated": "United States",
+ "timezones": [
+ "America/New_York",
+ "America/Detroit",
+ "America/Kentucky/Louisville",
+ "America/Kentucky/Monticello",
+ "America/Indiana/Indianapolis",
+ "America/Indiana/Vincennes",
+ "America/Indiana/Winamac",
+ "America/Indiana/Marengo",
+ "America/Indiana/Petersburg",
+ "America/Indiana/Vevay",
+ "America/Chicago",
+ "America/Indiana/Tell_City",
+ "America/Indiana/Knox",
+ "America/Menominee",
+ "America/North_Dakota/Center",
+ "America/North_Dakota/New_Salem",
+ "America/North_Dakota/Beulah",
+ "America/Denver",
+ "America/Boise",
+ "America/Phoenix",
+ "America/Los_Angeles",
+ "America/Anchorage",
+ "America/Juneau",
+ "America/Sitka",
+ "America/Metlakatla",
+ "America/Yakutat",
+ "America/Nome",
+ "America/Adak",
+ "Pacific/Honolulu"
+ ],
+ "is_in_european_union": false,
+ "fips": "US",
+ "geonames_id": 6252001,
+ "hasc_id": "US",
+ "wikidata_id": "Q30"
+ },
+ "city": {
+ "fips": "644000",
+ "alpha2": null,
+ "geonames_id": 5368753,
+ "hasc_id": null,
+ "wikidata_id": "Q65",
+ "name": "Los Angeles",
+ "name_translated": "Los Angeles"
+ },
+ "region": {
+ "fips": "US06",
+ "alpha2": "US-CA",
+ "geonames_id": 5332921,
+ "hasc_id": "US.CA",
+ "wikidata_id": "Q99",
+ "name": "California",
+ "name_translated": "California"
+ }
+ },
+ "tlds": [
+ ".us"
+ ],
+ "timezone": {
+ "id": "America/Los_Angeles",
+ "current_time": "2023-05-04T04:30:28-07:00",
+ "code": "PDT",
+ "is_daylight_saving": true,
+ "gmt_offset": -25200
+ },
+ "security": {
+ "is_anonymous": false,
+ "is_datacenter": false,
+ "is_vpn": false,
+ "is_bot": false,
+ "is_abuser": true,
+ "is_known_attacker": true,
+ "is_proxy": false,
+ "is_spam": false,
+ "is_tor": false,
+ "is_icloud_relay": false,
+ "threat_score": 100
+ },
+ "domains": {
+ "count": 10943,
+ "domains": [
+ "eliwise.academy",
+ "accountingprose.academy",
+ "pistola.academy",
+ "1and1-test-ntlds-fr.accountant",
+ "omnergy.africa"
+ ]
+ }
+ }
+}
+"""
+}
+
+
+class TestIpbase(unittest.TestCase):
+ def test_info(self):
+ """Test the JSON data extraction."""
+
+ params = {
+ "ip": "1.1.1.1",
+ "apikey": "aaa",
+ "hostname": True,
+ "language": "de",
+ }
+ module = Mock()
+ module.params = params
+
+ data = json.loads(IPBASE_DATA['response'].decode("utf-8"))
+
+ IpbaseInfo._get_url_data = Mock()
+ IpbaseInfo._get_url_data.return_value = data
+ ipbase_info = IpbaseInfo(module)
+
+ json_data = ipbase_info.info()
+
+ self.maxDiff = None
+ self.assertDictEqual(json_data, data)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_jenkins_build.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_jenkins_build.py
index 44c6307ac..d9013a018 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_jenkins_build.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_jenkins_build.py
@@ -75,6 +75,11 @@ class JenkinsMock():
def get_build_info(self, name, build_number):
if name == "host-delete":
raise jenkins.JenkinsException("job {0} number {1} does not exist".format(name, build_number))
+ elif name == "create-detached":
+ return {
+ "building": True,
+ "result": None
+ }
return {
"building": True,
"result": "SUCCESS"
@@ -222,3 +227,38 @@ class TestJenkinsBuild(unittest.TestCase):
"token": "xyz"
})
jenkins_build.main()
+
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build.test_dependencies')
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build.JenkinsBuild.get_jenkins_connection')
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build.JenkinsBuild.get_build_status')
+ def test_module_create_build_without_detach(self, build_status, jenkins_connection, test_deps):
+ test_deps.return_value = None
+ jenkins_connection.return_value = JenkinsMock()
+ build_status.return_value = JenkinsBuildMock().get_build_status()
+
+ with self.assertRaises(AnsibleExitJson) as return_json:
+ set_module_args({
+ "name": "create-detached",
+ "user": "abc",
+ "token": "xyz"
+ })
+ jenkins_build.main()
+
+ self.assertFalse(return_json.exception.args[0]['changed'])
+
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build.test_dependencies')
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build.JenkinsBuild.get_jenkins_connection')
+ def test_module_create_build_detached(self, jenkins_connection, test_deps):
+ test_deps.return_value = None
+ jenkins_connection.return_value = JenkinsMock()
+
+ with self.assertRaises(AnsibleExitJson) as return_json:
+ set_module_args({
+ "name": "create-detached",
+ "user": "abc",
+ "token": "xyz",
+ "detach": True
+ })
+ jenkins_build.main()
+
+ self.assertTrue(return_json.exception.args[0]['changed'])
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_jenkins_build_info.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_jenkins_build_info.py
new file mode 100644
index 000000000..b5d4126fe
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_jenkins_build_info.py
@@ -0,0 +1,180 @@
+# Copyright (c) Ansible project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible_collections.community.general.tests.unit.compat import unittest
+from ansible_collections.community.general.tests.unit.compat.mock import patch
+from ansible.module_utils import basic
+from ansible.module_utils.common.text.converters import to_bytes
+from ansible_collections.community.general.plugins.modules import jenkins_build_info
+
+import json
+
+
+def set_module_args(args):
+ """prepare arguments so that they will be picked up during module creation"""
+ args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
+ basic._ANSIBLE_ARGS = to_bytes(args)
+
+
+class AnsibleExitJson(Exception):
+ """Exception class to be raised by module.exit_json and caught by the test case"""
+ pass
+
+
+class AnsibleFailJson(Exception):
+ """Exception class to be raised by module.fail_json and caught by the test case"""
+ pass
+
+
+def exit_json(*args, **kwargs):
+ """function to patch over exit_json; package return data into an exception"""
+ if 'changed' not in kwargs:
+ kwargs['changed'] = False
+ raise AnsibleExitJson(kwargs)
+
+
+def fail_json(*args, **kwargs):
+ """function to patch over fail_json; package return data into an exception"""
+ kwargs['failed'] = True
+ raise AnsibleFailJson(kwargs)
+
+
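+# Minimal stand-in for the python-jenkins JenkinsException so these tests do not require the library.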
+class jenkins:
+ class JenkinsException(Exception):
+ pass
+
+
+class JenkinsBuildMock():
+ def __init__(self, name, build_number=None):
+ self.name = name
+ self.build_number = build_number
+
+ def get_build_status(self):
+ try:
+ instance = JenkinsMock()
+ response = JenkinsMock.get_build_info(instance, self.name, self.build_number)
+ return response
+ except jenkins.JenkinsException:
+ response = {}
+ response["result"] = "ABSENT"
+ return response
+ except Exception as e:
+ fail_json(msg='Unable to fetch build information, {0}'.format(e))
+
+
+class JenkinsMock():
+
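+ # "job-absent" simulates a missing job/build; any other name returns a successful build.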
+ def get_build_info(self, name, build_number):
+ if name == "job-absent":
+ raise jenkins.JenkinsException()
+
+ return {
+ "result": "SUCCESS",
+ "build_info": {}
+ }
+
+ def get_job_info(self, name):
+ if name == "job-absent":
+ raise jenkins.JenkinsException()
+
+ return {
+ "lastBuild": {
+ "number": 123
+ }
+ }
+
+
+class TestJenkinsBuildInfo(unittest.TestCase):
+
+ def setUp(self):
+ self.mock_module_helper = patch.multiple(basic.AnsibleModule,
+ exit_json=exit_json,
+ fail_json=fail_json)
+ self.mock_module_helper.start()
+ self.addCleanup(self.mock_module_helper.stop)
+
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build_info.test_dependencies')
+ def test_module_fail_when_required_args_missing(self, test_deps):
+ test_deps.return_value = None
+ with self.assertRaises(AnsibleFailJson):
+ set_module_args({})
+ jenkins_build_info.main()
+
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build_info.test_dependencies')
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build_info.JenkinsBuildInfo.get_jenkins_connection')
+ def test_module_get_build_info(self, jenkins_connection, test_deps):
+ test_deps.return_value = None
+ jenkins_connection.return_value = JenkinsMock()
+
+ with self.assertRaises(AnsibleExitJson) as return_json:
+ set_module_args({
+ "name": "job-present",
+ "user": "abc",
+ "token": "xyz",
+ "build_number": 30
+ })
+ jenkins_build_info.main()
+
+ self.assertFalse(return_json.exception.args[0]["changed"])
+
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build_info.test_dependencies')
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build_info.JenkinsBuildInfo.get_jenkins_connection')
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build_info.JenkinsBuildInfo.get_build_status')
+ def test_module_get_build_info_if_build_does_not_exist(self, build_status, jenkins_connection, test_deps):
+ test_deps.return_value = None
+ jenkins_connection.return_value = JenkinsMock()
+ build_status.return_value = JenkinsBuildMock("job-absent", 30).get_build_status()
+
+ with self.assertRaises(AnsibleExitJson) as return_json:
+ set_module_args({
+ "name": "job-absent",
+ "user": "abc",
+ "token": "xyz",
+ "build_number": 30
+ })
+ jenkins_build_info.main()
+
+ self.assertFalse(return_json.exception.args[0]['changed'])
+ self.assertTrue(return_json.exception.args[0]['failed'])
+ self.assertEqual("ABSENT", return_json.exception.args[0]['build_info']['result'])
+
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build_info.test_dependencies')
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build_info.JenkinsBuildInfo.get_jenkins_connection')
+ def test_module_get_build_info_get_last_build(self, jenkins_connection, test_deps):
+ test_deps.return_value = None
+ jenkins_connection.return_value = JenkinsMock()
+
+ with self.assertRaises(AnsibleExitJson) as return_json:
+ set_module_args({
+ "name": "job-present",
+ "user": "abc",
+ "token": "xyz"
+ })
+ jenkins_build_info.main()
+
+ self.assertFalse(return_json.exception.args[0]['changed'])
+ self.assertEqual("SUCCESS", return_json.exception.args[0]['build_info']['result'])
+
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build_info.test_dependencies')
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build_info.JenkinsBuildInfo.get_jenkins_connection')
+ @patch('ansible_collections.community.general.plugins.modules.jenkins_build_info.JenkinsBuildInfo.get_build_status')
+ def test_module_get_build_info_if_job_does_not_exist(self, build_status, jenkins_connection, test_deps):
+ test_deps.return_value = None
+ jenkins_connection.return_value = JenkinsMock()
+ build_status.return_value = JenkinsBuildMock("job-absent").get_build_status()
+
+ with self.assertRaises(AnsibleExitJson) as return_json:
+ set_module_args({
+ "name": "job-absent",
+ "user": "abc",
+ "token": "xyz"
+ })
+ jenkins_build_info.main()
+
+ self.assertFalse(return_json.exception.args[0]['changed'])
+ self.assertTrue(return_json.exception.args[0]['failed'])
+ self.assertEqual("ABSENT", return_json.exception.args[0]['build_info']['result'])
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_authentication_required_actions.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_authentication_required_actions.py
new file mode 100644
index 000000000..2adc3a896
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_authentication_required_actions.py
@@ -0,0 +1,835 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from contextlib import contextmanager
+
+from ansible_collections.community.general.tests.unit.compat import unittest
+from ansible_collections.community.general.tests.unit.compat.mock import patch
+from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleExitJson, ModuleTestCase, set_module_args
+
+from ansible_collections.community.general.plugins.modules import keycloak_authentication_required_actions
+
+from itertools import count
+
+from ansible.module_utils.six import StringIO
+
+
+@contextmanager
+def patch_keycloak_api(
+ get_required_actions=None,
+ register_required_action=None,
+ update_required_action=None,
+ delete_required_action=None,
+):
+ """
+ Mock context manager for patching the KeycloakAPI methods that contact the Keycloak server.
+
+ Each keyword argument is used as the side_effect of the mock that patches the method of the same name.
+
+ Example::
+
+ with patch_keycloak_api(get_required_actions=[[]]) as (mock_get, mock_register, mock_update, mock_delete):
+ ...
+ """
+
+ obj = keycloak_authentication_required_actions.KeycloakAPI
+ with patch.object(
+ obj,
+ 'get_required_actions',
+ side_effect=get_required_actions
+ ) as mock_get_required_actions:
+ with patch.object(
+ obj,
+ 'register_required_action',
+ side_effect=register_required_action
+ ) as mock_register_required_action:
+ with patch.object(
+ obj,
+ 'update_required_action',
+ side_effect=update_required_action
+ ) as mock_update_required_action:
+ with patch.object(
+ obj,
+ 'delete_required_action',
+ side_effect=delete_required_action
+ ) as mock_delete_required_action:
+ yield (
+ mock_get_required_actions,
+ mock_register_required_action,
+ mock_update_required_action,
+ mock_delete_required_action,
+ )
+
+
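+# get_response() resolves the canned response for a mocked request: callables are invoked,
+# dicts are keyed by HTTP method, and lists yield one element per call.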
+def get_response(object_with_future_response, method, get_id_call_count):
+ if callable(object_with_future_response):
+ return object_with_future_response()
+ if isinstance(object_with_future_response, dict):
+ return get_response(
+ object_with_future_response[method], method, get_id_call_count)
+ if isinstance(object_with_future_response, list):
+ call_number = next(get_id_call_count)
+ return get_response(
+ object_with_future_response[call_number], method, get_id_call_count)
+ return object_with_future_response
+
+
+def build_mocked_request(get_id_user_count, response_dict):
+ def _mocked_requests(*args, **kwargs):
+ url = args[0]
+ method = kwargs['method']
+ future_response = response_dict.get(url, None)
+ return get_response(future_response, method, get_id_user_count)
+ return _mocked_requests
+
+
+def create_wrapper(text_as_string):
+ """Allow to mock many times a call to one address.
+ Without this function, the StringIO is empty for the second call.
+ """
+ def _create_wrapper():
+ return StringIO(text_as_string)
+ return _create_wrapper
+
+
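+# Patch open_url so the Keycloak token endpoint returns a canned access token and the module
+# can authenticate without a real server.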
+def mock_good_connection():
+ token_response = {
+ 'http://keycloak.url/auth/realms/master/protocol/openid-connect/token': create_wrapper('{"access_token": "alongtoken"}'), }
+ return patch(
+ 'ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak.open_url',
+ side_effect=build_mocked_request(count(), token_response),
+ autospec=True
+ )
+
+
+class TestKeycloakAuthentication(ModuleTestCase):
+ def setUp(self):
+ super(TestKeycloakAuthentication, self).setUp()
+ self.module = keycloak_authentication_required_actions
+
+ def test_register_required_action(self):
+ """Register a new authentication required action."""
+
+ module_args = {
+ 'auth_client_id': 'admin-cli',
+ 'auth_keycloak_url': 'http://keycloak.url/auth',
+ 'auth_password': 'admin',
+ 'auth_realm': 'master',
+ 'auth_username': 'admin',
+ 'realm': 'master',
+ 'required_actions': [
+ {
+ 'alias': 'test-provider-id',
+ 'name': 'Test provider ID',
+ 'providerId': 'test-provider-id',
+ },
+ {
+ 'alias': 'test-provider-id',
+ 'name': 'Test provider ID (DUPLICATE ALIAS)',
+ 'providerId': 'test-provider-id',
+ },
+ {
+ 'alias': 'test-provider-id',
+ 'name': 'Test provider ID (DIFFERENT PROVIDER ID)',
+ 'providerId': 'test-provider-id-diff',
+ },
+ ],
+ 'state': 'present',
+ }
+
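+ # Server-side state returned by get_required_actions: it does not yet contain the requested
+ # 'test-provider-id' alias, so the module has to register it.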
+ return_value_required_actions = [
+ [
+ {
+ 'alias': 'CONFIGURE_TOTP',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Configure OTP',
+ 'priority': 10,
+ 'providerId': 'CONFIGURE_TOTP'
+ },
+ {
+ 'alias': 'TERMS_AND_CONDITIONS',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Terms and conditions',
+ 'priority': 20,
+ 'providerId': 'TERMS_AND_CONDITIONS'
+ },
+ {
+ 'alias': 'UPDATE_PASSWORD',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update Password',
+ 'priority': 30,
+ 'providerId': 'UPDATE_PASSWORD'
+ },
+ {
+ 'alias': 'UPDATE_PROFILE',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update Profile',
+ 'priority': 40,
+ 'providerId': 'UPDATE_PROFILE'
+ },
+ {
+ 'alias': 'VERIFY_EMAIL',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Verify Email',
+ 'priority': 50,
+ 'providerId': 'VERIFY_EMAIL'
+ },
+ {
+ 'alias': 'delete_account',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': False,
+ 'name': 'Delete Account',
+ 'priority': 60,
+ 'providerId': 'delete_account'
+ },
+ {
+ 'alias': 'webauthn-register',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Webauthn Register',
+ 'priority': 70,
+ 'providerId': 'webauthn-register'
+ },
+ {
+ 'alias': 'webauthn-register-passwordless',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Webauthn Register Passwordless',
+ 'priority': 80,
+ 'providerId': 'webauthn-register-passwordless'
+ },
+ {
+ 'alias': 'update_user_locale',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update User Locale',
+ 'priority': 1000,
+ 'providerId': 'update_user_locale'
+ }
+ ],
+ ]
+
+ changed = True
+
+ set_module_args(module_args)
+
+ # Run the module
+ with mock_good_connection():
+ with patch_keycloak_api(
+ get_required_actions=return_value_required_actions,
+ ) as (
+ mock_get_required_actions,
+ mock_register_required_action,
+ mock_update_required_action,
+ mock_delete_required_action,
+ ):
+ with self.assertRaises(AnsibleExitJson) as exec_info:
+ self.module.main()
+
+ # Verify number of call on each mock
+ self.assertEqual(len(mock_get_required_actions.mock_calls), 1)
+ self.assertEqual(len(mock_update_required_action.mock_calls), 1)
+ self.assertEqual(len(mock_register_required_action.mock_calls), 1)
+ self.assertEqual(len(mock_delete_required_action.mock_calls), 0)
+
+ # Verify that the module's changed status matches what is expected
+ self.assertIs(exec_info.exception.args[0]['changed'], changed)
+
+ def test_register_required_action_idempotency(self):
+ """Register an already existing new authentication required action again."""
+
+ module_args = {
+ 'auth_client_id': 'admin-cli',
+ 'auth_keycloak_url': 'http://keycloak.url/auth',
+ 'auth_password': 'admin',
+ 'auth_realm': 'master',
+ 'auth_username': 'admin',
+ 'realm': 'master',
+ 'required_actions': [
+ {
+ 'alias': 'test-provider-id',
+ 'name': 'Test provider ID',
+ 'providerId': 'test-provider-id',
+ },
+ {
+ 'alias': 'test-provider-id',
+ 'name': 'Test provider ID (DUPLICATE ALIAS)',
+ 'providerId': 'test-provider-id',
+ },
+ {
+ 'alias': 'test-provider-id',
+ 'name': 'Test provider ID (DIFFERENT PROVIDER ID)',
+ 'providerId': 'test-provider-id-diff',
+ },
+ ],
+ 'state': 'present',
+ }
+
+ return_value_required_actions = [
+ [
+ {
+ 'alias': 'CONFIGURE_TOTP',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Configure OTP',
+ 'priority': 10,
+ 'providerId': 'CONFIGURE_TOTP'
+ },
+ {
+ 'alias': 'TERMS_AND_CONDITIONS',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Terms and conditions',
+ 'priority': 20,
+ 'providerId': 'TERMS_AND_CONDITIONS'
+ },
+ {
+ 'alias': 'UPDATE_PASSWORD',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update Password',
+ 'priority': 30,
+ 'providerId': 'UPDATE_PASSWORD'
+ },
+ {
+ 'alias': 'UPDATE_PROFILE',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update Profile',
+ 'priority': 40,
+ 'providerId': 'UPDATE_PROFILE'
+ },
+ {
+ 'alias': 'VERIFY_EMAIL',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Verify Email',
+ 'priority': 50,
+ 'providerId': 'VERIFY_EMAIL'
+ },
+ {
+ 'alias': 'delete_account',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': False,
+ 'name': 'Delete Account',
+ 'priority': 60,
+ 'providerId': 'delete_account'
+ },
+ {
+ 'alias': 'webauthn-register',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Webauthn Register',
+ 'priority': 70,
+ 'providerId': 'webauthn-register'
+ },
+ {
+ 'alias': 'webauthn-register-passwordless',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Webauthn Register Passwordless',
+ 'priority': 80,
+ 'providerId': 'webauthn-register-passwordless'
+ },
+ {
+ 'alias': 'test-provider-id',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': False,
+ 'name': 'Test provider ID',
+ 'priority': 90,
+ 'providerId': 'test-provider-id'
+ },
+ {
+ 'alias': 'update_user_locale',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update User Locale',
+ 'priority': 1000,
+ 'providerId': 'update_user_locale'
+ }
+ ],
+ ]
+
+ changed = False
+
+ set_module_args(module_args)
+
+ # Run the module
+ with mock_good_connection():
+ with patch_keycloak_api(
+ get_required_actions=return_value_required_actions,
+ ) as (
+ mock_get_required_actions,
+ mock_register_required_action,
+ mock_update_required_action,
+ mock_delete_required_action,
+ ):
+ with self.assertRaises(AnsibleExitJson) as exec_info:
+ self.module.main()
+
+ # Verify number of call on each mock
+ self.assertEqual(len(mock_get_required_actions.mock_calls), 1)
+ self.assertEqual(len(mock_update_required_action.mock_calls), 0)
+ self.assertEqual(len(mock_register_required_action.mock_calls), 0)
+ self.assertEqual(len(mock_delete_required_action.mock_calls), 0)
+
+ # Verify that the module's changed status matches what is expected
+ self.assertIs(exec_info.exception.args[0]['changed'], changed)
+
+ def test_update_required_actions(self):
+ """Update an authentication required action."""
+
+ module_args = {
+ 'auth_client_id': 'admin-cli',
+ 'auth_keycloak_url': 'http://keycloak.url/auth',
+ 'auth_password': 'admin',
+ 'auth_realm': 'master',
+ 'auth_username': 'admin',
+ 'realm': 'master',
+ 'required_actions': [
+ {
+ 'alias': 'test-provider-id',
+ 'name': 'Test provider ID UPDATED',
+ 'providerId': 'test-provider-id',
+ },
+ {
+ 'alias': 'test-provider-id',
+ 'name': 'Test provider ID UPDATED (DUPLICATE ALIAS)',
+ 'providerId': 'test-provider-id',
+ },
+ {
+ 'alias': 'test-provider-id',
+ 'name': 'Test provider ID UPDATED (DIFFERENT PROVIDER ID)',
+ 'providerId': 'test-provider-id-diff',
+ },
+ ],
+ 'state': 'present',
+ }
+
+ return_value_required_actions = [
+ [
+ {
+ 'alias': 'CONFIGURE_TOTP',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Configure OTP',
+ 'priority': 10,
+ 'providerId': 'CONFIGURE_TOTP'
+ },
+ {
+ 'alias': 'TERMS_AND_CONDITIONS',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Terms and conditions',
+ 'priority': 20,
+ 'providerId': 'TERMS_AND_CONDITIONS'
+ },
+ {
+ 'alias': 'UPDATE_PASSWORD',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update Password',
+ 'priority': 30,
+ 'providerId': 'UPDATE_PASSWORD'
+ },
+ {
+ 'alias': 'UPDATE_PROFILE',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update Profile',
+ 'priority': 40,
+ 'providerId': 'UPDATE_PROFILE'
+ },
+ {
+ 'alias': 'VERIFY_EMAIL',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Verify Email',
+ 'priority': 50,
+ 'providerId': 'VERIFY_EMAIL'
+ },
+ {
+ 'alias': 'delete_account',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': False,
+ 'name': 'Delete Account',
+ 'priority': 60,
+ 'providerId': 'delete_account'
+ },
+ {
+ 'alias': 'webauthn-register',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Webauthn Register',
+ 'priority': 70,
+ 'providerId': 'webauthn-register'
+ },
+ {
+ 'alias': 'webauthn-register-passwordless',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Webauthn Register Passwordless',
+ 'priority': 80,
+ 'providerId': 'webauthn-register-passwordless'
+ },
+ {
+ 'alias': 'test-provider-id',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': False,
+ 'name': 'Test provider ID',
+ 'priority': 90,
+ 'providerId': 'test-provider-id'
+ },
+ {
+ 'alias': 'update_user_locale',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update User Locale',
+ 'priority': 1000,
+ 'providerId': 'update_user_locale'
+ }
+ ],
+ ]
+
+ changed = True
+
+ set_module_args(module_args)
+
+ # Run the module
+ with mock_good_connection():
+ with patch_keycloak_api(
+ get_required_actions=return_value_required_actions,
+ ) as (
+ mock_get_required_actions,
+ mock_register_required_action,
+ mock_update_required_action,
+ mock_delete_required_action,
+ ):
+ with self.assertRaises(AnsibleExitJson) as exec_info:
+ self.module.main()
+
+ # Verify number of call on each mock
+ self.assertEqual(len(mock_get_required_actions.mock_calls), 1)
+ self.assertEqual(len(mock_update_required_action.mock_calls), 1)
+ self.assertEqual(len(mock_register_required_action.mock_calls), 0)
+ self.assertEqual(len(mock_delete_required_action.mock_calls), 0)
+
+ # Verify that the module's changed status matches what is expected
+ self.assertIs(exec_info.exception.args[0]['changed'], changed)
+
+ def test_delete_required_action(self):
+ """Delete a registered authentication required action."""
+
+ module_args = {
+ 'auth_client_id': 'admin-cli',
+ 'auth_keycloak_url': 'http://keycloak.url/auth',
+ 'auth_password': 'admin',
+ 'auth_realm': 'master',
+ 'auth_username': 'admin',
+ 'realm': 'master',
+ 'required_actions': [
+ {
+ 'alias': 'test-provider-id',
+ },
+ ],
+ 'state': 'absent',
+ }
+
+ return_value_required_actions = [
+ [
+ {
+ 'alias': 'CONFIGURE_TOTP',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Configure OTP',
+ 'priority': 10,
+ 'providerId': 'CONFIGURE_TOTP'
+ },
+ {
+ 'alias': 'TERMS_AND_CONDITIONS',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Terms and conditions',
+ 'priority': 20,
+ 'providerId': 'TERMS_AND_CONDITIONS'
+ },
+ {
+ 'alias': 'UPDATE_PASSWORD',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update Password',
+ 'priority': 30,
+ 'providerId': 'UPDATE_PASSWORD'
+ },
+ {
+ 'alias': 'UPDATE_PROFILE',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update Profile',
+ 'priority': 40,
+ 'providerId': 'UPDATE_PROFILE'
+ },
+ {
+ 'alias': 'VERIFY_EMAIL',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Verify Email',
+ 'priority': 50,
+ 'providerId': 'VERIFY_EMAIL'
+ },
+ {
+ 'alias': 'delete_account',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': False,
+ 'name': 'Delete Account',
+ 'priority': 60,
+ 'providerId': 'delete_account'
+ },
+ {
+ 'alias': 'webauthn-register',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Webauthn Register',
+ 'priority': 70,
+ 'providerId': 'webauthn-register'
+ },
+ {
+ 'alias': 'webauthn-register-passwordless',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Webauthn Register Passwordless',
+ 'priority': 80,
+ 'providerId': 'webauthn-register-passwordless'
+ },
+ {
+ 'alias': 'test-provider-id',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': False,
+ 'name': 'Test provider ID',
+ 'priority': 90,
+ 'providerId': 'test-provider-id'
+ },
+ {
+ 'alias': 'update_user_locale',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update User Locale',
+ 'priority': 1000,
+ 'providerId': 'update_user_locale'
+ }
+ ],
+ ]
+
+ changed = True
+
+ set_module_args(module_args)
+
+ # Run the module
+ with mock_good_connection():
+ with patch_keycloak_api(
+ get_required_actions=return_value_required_actions,
+ ) as (
+ mock_get_required_actions,
+ mock_register_required_action,
+ mock_update_required_action,
+ mock_delete_required_action,
+ ):
+ with self.assertRaises(AnsibleExitJson) as exec_info:
+ self.module.main()
+
+ # Verify number of call on each mock
+ self.assertEqual(len(mock_get_required_actions.mock_calls), 1)
+ self.assertEqual(len(mock_update_required_action.mock_calls), 0)
+ self.assertEqual(len(mock_register_required_action.mock_calls), 0)
+ self.assertEqual(len(mock_delete_required_action.mock_calls), 1)
+
+ # Verify that the module's changed status matches what is expected
+ self.assertIs(exec_info.exception.args[0]['changed'], changed)
+
+ def test_delete_required_action_idempotency(self):
+ """Delete an already deleted authentication required action."""
+
+ module_args = {
+ 'auth_client_id': 'admin-cli',
+ 'auth_keycloak_url': 'http://keycloak.url/auth',
+ 'auth_password': 'admin',
+ 'auth_realm': 'master',
+ 'auth_username': 'admin',
+ 'realm': 'master',
+ 'required_actions': [
+ {
+ 'alias': 'test-provider-id',
+ },
+ ],
+ 'state': 'absent',
+ }
+
+ return_value_required_actions = [
+ [
+ {
+ 'alias': 'CONFIGURE_TOTP',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Configure OTP',
+ 'priority': 10,
+ 'providerId': 'CONFIGURE_TOTP'
+ },
+ {
+ 'alias': 'TERMS_AND_CONDITIONS',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Terms and conditions',
+ 'priority': 20,
+ 'providerId': 'TERMS_AND_CONDITIONS'
+ },
+ {
+ 'alias': 'UPDATE_PASSWORD',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update Password',
+ 'priority': 30,
+ 'providerId': 'UPDATE_PASSWORD'
+ },
+ {
+ 'alias': 'UPDATE_PROFILE',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update Profile',
+ 'priority': 40,
+ 'providerId': 'UPDATE_PROFILE'
+ },
+ {
+ 'alias': 'VERIFY_EMAIL',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Verify Email',
+ 'priority': 50,
+ 'providerId': 'VERIFY_EMAIL'
+ },
+ {
+ 'alias': 'delete_account',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': False,
+ 'name': 'Delete Account',
+ 'priority': 60,
+ 'providerId': 'delete_account'
+ },
+ {
+ 'alias': 'webauthn-register',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Webauthn Register',
+ 'priority': 70,
+ 'providerId': 'webauthn-register'
+ },
+ {
+ 'alias': 'webauthn-register-passwordless',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Webauthn Register Passwordless',
+ 'priority': 80,
+ 'providerId': 'webauthn-register-passwordless'
+ },
+ {
+ 'alias': 'update_user_locale',
+ 'config': {},
+ 'defaultAction': False,
+ 'enabled': True,
+ 'name': 'Update User Locale',
+ 'priority': 1000,
+ 'providerId': 'update_user_locale'
+ }
+ ],
+ ]
+
+ changed = False
+
+ set_module_args(module_args)
+
+ # Run the module
+ with mock_good_connection():
+ with patch_keycloak_api(
+ get_required_actions=return_value_required_actions,
+ ) as (
+ mock_get_required_actions,
+ mock_register_required_action,
+ mock_update_required_action,
+ mock_delete_required_action,
+ ):
+ with self.assertRaises(AnsibleExitJson) as exec_info:
+ self.module.main()
+
+ # Verify number of call on each mock
+ self.assertEqual(len(mock_get_required_actions.mock_calls), 1)
+ self.assertEqual(len(mock_update_required_action.mock_calls), 0)
+ self.assertEqual(len(mock_register_required_action.mock_calls), 0)
+ self.assertEqual(len(mock_delete_required_action.mock_calls), 0)
+
+ # Verify that the module's changed status matches what is expected
+ self.assertIs(exec_info.exception.args[0]['changed'], changed)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_client_rolemapping.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_client_rolemapping.py
index 58c8b9548..359e6304e 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_client_rolemapping.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_client_rolemapping.py
@@ -120,6 +120,11 @@ class TestKeycloakRealm(ModuleTestCase):
'state': 'present',
'client_id': 'test_client',
'group_name': 'test_group',
+ 'parents': [
+ {
+ 'name': 'parent_group'
+ }
+ ],
'roles': [
{
'name': 'test_role1',
@@ -139,7 +144,7 @@ class TestKeycloakRealm(ModuleTestCase):
"clientRoles": "{}",
"id": "92f2400e-0ecb-4185-8950-12dcef616c2b",
"name": "test_group",
- "path": "/test_group",
+ "path": "/parent_group/test_group",
"realmRoles": "[]",
"subGroups": "[]"
}]
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_role.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_role.py
index c48c9771a..cc2f6e716 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_role.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_role.py
@@ -21,7 +21,9 @@ from ansible.module_utils.six import StringIO
@contextmanager
-def patch_keycloak_api(get_realm_role, create_realm_role=None, update_realm_role=None, delete_realm_role=None):
+def patch_keycloak_api(get_realm_role=None, create_realm_role=None, update_realm_role=None, delete_realm_role=None,
+ get_client_role=None, create_client_role=None, update_client_role=None, delete_client_role=None,
+ get_client_by_id=None, get_role_composites=None):
"""Mock context manager for patching the methods in PwPolicyIPAClient that contact the IPA server
Patches the `login` and `_post_json` methods
@@ -41,7 +43,15 @@ def patch_keycloak_api(get_realm_role, create_realm_role=None, update_realm_role
with patch.object(obj, 'create_realm_role', side_effect=create_realm_role) as mock_create_realm_role:
with patch.object(obj, 'update_realm_role', side_effect=update_realm_role) as mock_update_realm_role:
with patch.object(obj, 'delete_realm_role', side_effect=delete_realm_role) as mock_delete_realm_role:
- yield mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role
+ with patch.object(obj, 'get_client_role', side_effect=get_client_role) as mock_get_client_role:
+ with patch.object(obj, 'create_client_role', side_effect=create_client_role) as mock_create_client_role:
+ with patch.object(obj, 'update_client_role', side_effect=update_client_role) as mock_update_client_role:
+ with patch.object(obj, 'delete_client_role', side_effect=delete_client_role) as mock_delete_client_role:
+ with patch.object(obj, 'get_client_by_id', side_effect=get_client_by_id) as mock_get_client_by_id:
+ with patch.object(obj, 'get_role_composites', side_effect=get_role_composites) as mock_get_role_composites:
+ yield mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role, \
+ mock_get_client_role, mock_create_client_role, mock_update_client_role, mock_delete_client_role, \
+ mock_get_client_by_id, mock_get_role_composites
def get_response(object_with_future_response, method, get_id_call_count):
@@ -125,7 +135,9 @@ class TestKeycloakRealmRole(ModuleTestCase):
with mock_good_connection():
with patch_keycloak_api(get_realm_role=return_value_absent, create_realm_role=return_value_created) \
- as (mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role):
+ as (mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role,
+ mock_get_client_role, mock_create_client_role, mock_update_client_role, mock_delete_client_role,
+ mock_get_client_by_client_id, mock_get_role_composites):
with self.assertRaises(AnsibleExitJson) as exec_info:
self.module.main()
@@ -179,7 +191,9 @@ class TestKeycloakRealmRole(ModuleTestCase):
with mock_good_connection():
with patch_keycloak_api(get_realm_role=return_value_present, update_realm_role=return_value_updated) \
- as (mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role):
+ as (mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role,
+ mock_get_client_role, mock_create_client_role, mock_update_client_role, mock_delete_client_role,
+ mock_get_client_by_client_id, mock_get_role_composites):
with self.assertRaises(AnsibleExitJson) as exec_info:
self.module.main()
@@ -233,7 +247,9 @@ class TestKeycloakRealmRole(ModuleTestCase):
with mock_good_connection():
with patch_keycloak_api(get_realm_role=return_value_present, update_realm_role=return_value_updated) \
- as (mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role):
+ as (mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role,
+ mock_get_client_role, mock_create_client_role, mock_update_client_role, mock_delete_client_role,
+ mock_get_client_by_client_id, mock_get_role_composites):
with self.assertRaises(AnsibleExitJson) as exec_info:
self.module.main()
@@ -244,6 +260,140 @@ class TestKeycloakRealmRole(ModuleTestCase):
# Verify that the module's changed status matches what is expected
self.assertIs(exec_info.exception.args[0]['changed'], changed)
+ def test_create_with_composites_when_present_no_change(self):
+ """Update without change a realm role"""
+
+ module_args = {
+ 'auth_keycloak_url': 'http://keycloak.url/auth',
+ 'auth_password': 'admin',
+ 'auth_realm': 'master',
+ 'auth_username': 'admin',
+ 'auth_client_id': 'admin-cli',
+ 'validate_certs': True,
+ 'realm': 'realm-name',
+ 'name': 'role-name',
+ 'description': 'role-description',
+ 'composite': True,
+ 'composites': [
+ {
+ 'client_id': 'client_1',
+ 'name': 'client-role1'
+ },
+ {
+ 'name': 'realm-role-1'
+ }
+ ]
+
+ }
+ return_value_present = [
+ {
+ "attributes": {},
+ "clientRole": False,
+ "composite": True,
+ "containerId": "realm-name",
+ "description": "role-description",
+ "id": "90f1cdb6-be88-496e-89c6-da1fb6bc6966",
+ "name": "role-name",
+ },
+ {
+ "attributes": {},
+ "clientRole": False,
+ "composite": True,
+ "containerId": "realm-name",
+ "description": "role-description",
+ "id": "90f1cdb6-be88-496e-89c6-da1fb6bc6966",
+ "name": "role-name",
+ }
+ ]
+ return_value_updated = [None]
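+ # The composites already present on the role match the requested ones, so no update call is expected.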
+ return_get_role_composites = [
+ [
+ {
+ 'clientRole': True,
+ 'containerId': 'c4367fac-f427-11ed-8e2f-aff070d20f0e',
+ 'name': 'client-role1'
+ },
+ {
+ 'clientRole': False,
+ 'containerId': 'realm-name',
+ 'name': 'realm-role-1'
+ }
+ ]
+ ]
+ return_get_client_by_client_id = [
+ {
+ "id": "de152444-f126-4a7a-8273-4ee1544133ad",
+ "clientId": "client_1",
+ "name": "client_1",
+ "description": "client_1",
+ "surrogateAuthRequired": False,
+ "enabled": True,
+ "alwaysDisplayInConsole": False,
+ "clientAuthenticatorType": "client-secret",
+ "redirectUris": [
+ "http://localhost:8080/*",
+ ],
+ "webOrigins": [
+ "*"
+ ],
+ "notBefore": 0,
+ "bearerOnly": False,
+ "consentRequired": False,
+ "standardFlowEnabled": True,
+ "implicitFlowEnabled": False,
+ "directAccessGrantsEnabled": False,
+ "serviceAccountsEnabled": False,
+ "publicClient": False,
+ "frontchannelLogout": False,
+ "protocol": "openid-connect",
+ "attributes": {
+ "backchannel.logout.session.required": "true",
+ "backchannel.logout.revoke.offline.tokens": "false"
+ },
+ "authenticationFlowBindingOverrides": {},
+ "fullScopeAllowed": True,
+ "nodeReRegistrationTimeout": -1,
+ "defaultClientScopes": [
+ "web-origins",
+ "acr",
+ "profile",
+ "roles",
+ "email"
+ ],
+ "optionalClientScopes": [
+ "address",
+ "phone",
+ "offline_access",
+ "microprofile-jwt"
+ ]
+ }
+ ]
+
+ changed = False
+
+ set_module_args(module_args)
+
+ # Run the module
+
+ with mock_good_connection():
+ with patch_keycloak_api(get_realm_role=return_value_present, update_realm_role=return_value_updated,
+ get_client_by_id=return_get_client_by_client_id,
+ get_role_composites=return_get_role_composites) \
+ as (mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role,
+ mock_get_client_role, mock_create_client_role, mock_update_client_role, mock_delete_client_role,
+ mock_get_client_by_client_id, mock_get_role_composites):
+ with self.assertRaises(AnsibleExitJson) as exec_info:
+ self.module.main()
+
+ self.assertEqual(len(mock_get_realm_role.mock_calls), 1)
+ self.assertEqual(len(mock_create_realm_role.mock_calls), 0)
+ self.assertEqual(len(mock_update_realm_role.mock_calls), 0)
+ self.assertEqual(len(mock_get_client_by_client_id.mock_calls), 1)
+ self.assertEqual(len(mock_get_role_composites.mock_calls), 1)
+
+ # Verify that the module's changed status matches what is expected
+ self.assertIs(exec_info.exception.args[0]['changed'], changed)
+
def test_delete_when_absent(self):
"""Remove an absent realm role"""
@@ -268,7 +418,9 @@ class TestKeycloakRealmRole(ModuleTestCase):
with mock_good_connection():
with patch_keycloak_api(get_realm_role=return_value_absent, delete_realm_role=return_value_deleted) \
- as (mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role):
+ as (mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role,
+ mock_get_client_role, mock_create_client_role, mock_update_client_role, mock_delete_client_role,
+ mock_get_client_by_client_id, mock_get_role_composites):
with self.assertRaises(AnsibleExitJson) as exec_info:
self.module.main()
@@ -312,7 +464,9 @@ class TestKeycloakRealmRole(ModuleTestCase):
with mock_good_connection():
with patch_keycloak_api(get_realm_role=return_value_absent, delete_realm_role=return_value_deleted) \
- as (mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role):
+ as (mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role,
+ mock_get_client_role, mock_create_client_role, mock_update_client_role, mock_delete_client_role,
+ mock_get_client_by_client_id, mock_get_role_composites):
with self.assertRaises(AnsibleExitJson) as exec_info:
self.module.main()
@@ -323,5 +477,207 @@ class TestKeycloakRealmRole(ModuleTestCase):
self.assertIs(exec_info.exception.args[0]['changed'], changed)
+class TestKeycloakClientRole(ModuleTestCase):
+ def setUp(self):
+ super(TestKeycloakClientRole, self).setUp()
+ self.module = keycloak_role
+
+ def test_create_client_role_with_composites_when_absent(self):
+ """Update with change a realm role"""
+
+ module_args = {
+ 'auth_keycloak_url': 'http://keycloak.url/auth',
+ 'auth_password': 'admin',
+ 'auth_realm': 'master',
+ 'auth_username': 'admin',
+ 'auth_client_id': 'admin-cli',
+ 'validate_certs': True,
+ 'realm': 'realm-name',
+ 'client_id': 'client-name',
+ 'name': 'role-name',
+ 'description': 'role-description',
+ 'composite': True,
+ 'composites': [
+ {
+ 'client_id': 'client_1',
+ 'name': 'client-role1'
+ },
+ {
+ 'name': 'realm-role-1'
+ }
+ ]
+ }
+ return_get_client_role = [
+ None,
+ {
+ "attributes": {},
+ "clientRole": True,
+ "composite": True,
+ "composites": [
+ {
+ 'client': {
+ 'client1': ['client-role1']
+ }
+ },
+ {
+ 'realm': ['realm-role-1']
+ }
+ ],
+ "containerId": "9ae25ec2-f40a-11ed-9261-b3bacf720f69",
+ "description": "role-description",
+ "id": "90f1cdb6-be88-496e-89c6-da1fb6bc6966",
+ "name": "role-name",
+ }
+ ]
+ changed = True
+
+ set_module_args(module_args)
+
+ # Run the module
+
+ with mock_good_connection():
+ with patch_keycloak_api(get_client_role=return_get_client_role) \
+ as (mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role,
+ mock_get_client_role, mock_create_client_role, mock_update_client_role, mock_delete_client_role,
+ mock_get_client_by_client_id, mock_get_role_composites):
+ with self.assertRaises(AnsibleExitJson) as exec_info:
+ self.module.main()
+
+ self.assertEqual(len(mock_get_realm_role.mock_calls), 0)
+ self.assertEqual(len(mock_create_realm_role.mock_calls), 0)
+ self.assertEqual(len(mock_update_realm_role.mock_calls), 0)
+ self.assertEqual(len(mock_get_client_role.mock_calls), 2)
+ self.assertEqual(len(mock_create_client_role.mock_calls), 1)
+ self.assertEqual(len(mock_update_client_role.mock_calls), 0)
+
+ # Verify that the module's changed status matches what is expected
+ self.assertIs(exec_info.exception.args[0]['changed'], changed)
+
+ def test_create_client_role_with_composites_when_present_no_change(self):
+ """Update with change a realm role"""
+
+ module_args = {
+ 'auth_keycloak_url': 'http://keycloak.url/auth',
+ 'auth_password': 'admin',
+ 'auth_realm': 'master',
+ 'auth_username': 'admin',
+ 'auth_client_id': 'admin-cli',
+ 'validate_certs': True,
+ 'realm': 'realm-name',
+ 'client_id': 'client-name',
+ 'name': 'role-name',
+ 'description': 'role-description',
+ 'composite': True,
+ 'composites': [
+ {
+ 'client_id': 'client_1',
+ 'name': 'client-role1'
+ },
+ {
+ 'name': 'realm-role-1'
+ }
+ ]
+ }
+ return_get_client_role = [
+ {
+ "attributes": {},
+ "clientRole": True,
+ "composite": True,
+ "containerId": "9ae25ec2-f40a-11ed-9261-b3bacf720f69",
+ "description": "role-description",
+ "id": "90f1cdb6-be88-496e-89c6-da1fb6bc6966",
+ "name": "role-name",
+ }
+ ]
+ return_get_role_composites = [
+ [
+ {
+ 'clientRole': True,
+ 'containerId': 'c4367fac-f427-11ed-8e2f-aff070d20f0e',
+ 'name': 'client-role1'
+ },
+ {
+ 'clientRole': False,
+ 'containerId': 'realm-name',
+ 'name': 'realm-role-1'
+ }
+ ]
+ ]
+ return_get_client_by_client_id = [
+ {
+ "id": "de152444-f126-4a7a-8273-4ee1544133ad",
+ "clientId": "client_1",
+ "name": "client_1",
+ "description": "client_1",
+ "surrogateAuthRequired": False,
+ "enabled": True,
+ "alwaysDisplayInConsole": False,
+ "clientAuthenticatorType": "client-secret",
+ "redirectUris": [
+ "http://localhost:8080/*",
+ ],
+ "webOrigins": [
+ "*"
+ ],
+ "notBefore": 0,
+ "bearerOnly": False,
+ "consentRequired": False,
+ "standardFlowEnabled": True,
+ "implicitFlowEnabled": False,
+ "directAccessGrantsEnabled": False,
+ "serviceAccountsEnabled": False,
+ "publicClient": False,
+ "frontchannelLogout": False,
+ "protocol": "openid-connect",
+ "attributes": {
+ "backchannel.logout.session.required": "true",
+ "backchannel.logout.revoke.offline.tokens": "false"
+ },
+ "authenticationFlowBindingOverrides": {},
+ "fullScopeAllowed": True,
+ "nodeReRegistrationTimeout": -1,
+ "defaultClientScopes": [
+ "web-origins",
+ "acr",
+ "profile",
+ "roles",
+ "email"
+ ],
+ "optionalClientScopes": [
+ "address",
+ "phone",
+ "offline_access",
+ "microprofile-jwt"
+ ]
+ }
+ ]
+ changed = False
+
+ set_module_args(module_args)
+
+ # Run the module
+
+ with mock_good_connection():
+ with patch_keycloak_api(get_client_role=return_get_client_role, get_client_by_id=return_get_client_by_client_id,
+ get_role_composites=return_get_role_composites) \
+ as (mock_get_realm_role, mock_create_realm_role, mock_update_realm_role, mock_delete_realm_role,
+ mock_get_client_role, mock_create_client_role, mock_update_client_role, mock_delete_client_role,
+ mock_get_client_by_client_id, mock_get_role_composites):
+ with self.assertRaises(AnsibleExitJson) as exec_info:
+ self.module.main()
+
+ self.assertEqual(len(mock_get_realm_role.mock_calls), 0)
+ self.assertEqual(len(mock_create_realm_role.mock_calls), 0)
+ self.assertEqual(len(mock_update_realm_role.mock_calls), 0)
+ self.assertEqual(len(mock_get_client_role.mock_calls), 1)
+ self.assertEqual(len(mock_create_client_role.mock_calls), 0)
+ self.assertEqual(len(mock_update_client_role.mock_calls), 0)
+ self.assertEqual(len(mock_get_client_by_client_id.mock_calls), 1)
+ self.assertEqual(len(mock_get_role_composites.mock_calls), 1)
+
+ # Verify that the module's changed status matches what is expected
+ self.assertIs(exec_info.exception.args[0]['changed'], changed)
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_user.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_user.py
new file mode 100644
index 000000000..26bc33d82
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_user.py
@@ -0,0 +1,354 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+from contextlib import contextmanager
+
+from ansible_collections.community.general.tests.unit.compat import unittest
+from ansible_collections.community.general.tests.unit.compat.mock import patch
+from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleExitJson, ModuleTestCase, set_module_args
+
+from ansible_collections.community.general.plugins.modules import keycloak_user
+
+from itertools import count
+
+from ansible.module_utils.six import StringIO
+
+
+@contextmanager
+def patch_keycloak_api(get_user_by_username=None,
+ create_user=None,
+ update_user_groups_membership=None,
+ get_user_groups=None,
+ delete_user=None,
+ update_user=None):
+ """Mock context manager for patching the methods in KeycloakAPI that contact the Keycloak server
+
+ Patches the user management methods (get_user_by_username, create_user, update_user_groups_membership, get_user_groups, delete_user and update_user)
+
+ """
+
+ obj = keycloak_user.KeycloakAPI
+ with patch.object(obj, 'get_user_by_username', side_effect=get_user_by_username) as mock_get_user_by_username:
+ with patch.object(obj, 'create_user', side_effect=create_user) as mock_create_user:
+ with patch.object(obj, 'update_user_groups_membership', side_effect=update_user_groups_membership) as mock_update_user_groups_membership:
+ with patch.object(obj, 'get_user_groups', side_effect=get_user_groups) as mock_get_user_groups:
+ with patch.object(obj, 'delete_user', side_effect=delete_user) as mock_delete_user:
+ with patch.object(obj, 'update_user', side_effect=update_user) as mock_update_user:
+ yield mock_get_user_by_username, mock_create_user, mock_update_user_groups_membership, \
+ mock_get_user_groups, mock_delete_user, mock_update_user
+
+
+def get_response(object_with_future_response, method, get_id_call_count):
+ if callable(object_with_future_response):
+ return object_with_future_response()
+ if isinstance(object_with_future_response, dict):
+ return get_response(
+ object_with_future_response[method], method, get_id_call_count)
+ if isinstance(object_with_future_response, list):
+ call_number = next(get_id_call_count)
+ return get_response(
+ object_with_future_response[call_number], method, get_id_call_count)
+ return object_with_future_response
+
+
+def build_mocked_request(get_id_user_count, response_dict):
+ def _mocked_requests(*args, **kwargs):
+ url = args[0]
+ method = kwargs['method']
+ future_response = response_dict.get(url, None)
+ return get_response(future_response, method, get_id_user_count)
+
+ return _mocked_requests
+
+
+def create_wrapper(text_as_string):
+ """Allow to mock many times a call to one address.
+ Without this function, the StringIO is empty for the second call.
+ """
+
+ def _create_wrapper():
+ return StringIO(text_as_string)
+
+ return _create_wrapper
+
+
+def mock_good_connection():
+ token_response = {
+ 'http://keycloak.url/auth/realms/master/protocol/openid-connect/token': create_wrapper(
+ '{"access_token": "alongtoken"}'), }
+ return patch(
+ 'ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak.open_url',
+ side_effect=build_mocked_request(count(), token_response),
+ autospec=True
+ )
+
+
+class TestKeycloakUser(ModuleTestCase):
+ def setUp(self):
+ super(TestKeycloakUser, self).setUp()
+ self.module = keycloak_user
+
+ def test_add_new_user(self):
+ """Add a new user"""
+
+ module_args = {
+ 'auth_keycloak_url': 'https://auth.example.com/auth',
+ 'token': '{{ access_token }}',
+ 'state': 'present',
+ 'realm': 'master',
+ 'username': 'test',
+ 'groups': []
+ }
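+ # Each side_effect list supplies one return value per expected call: no user exists yet,
+ # so create_user must be invoked exactly once.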
+ return_value_get_user_by_username = [None]
+ return_value_update_user_groups_membership = [False]
+ return_get_user_groups = [[]]
+ return_create_user = [{'id': '123eqwdawer24qwdqw4'}]
+ return_delete_user = None
+ return_update_user = None
+ changed = True
+
+ set_module_args(module_args)
+
+ # Run the module
+
+ with mock_good_connection():
+ with patch_keycloak_api(get_user_by_username=return_value_get_user_by_username,
+ create_user=return_create_user,
+ update_user_groups_membership=return_value_update_user_groups_membership,
+ get_user_groups=return_get_user_groups,
+ update_user=return_update_user,
+ delete_user=return_delete_user) \
+ as (mock_get_user_by_username,
+ mock_create_user,
+ mock_update_user_groups_membership,
+ mock_get_user_groups,
+ mock_delete_user,
+ mock_update_user):
+ with self.assertRaises(AnsibleExitJson) as exec_info:
+ self.module.main()
+
+ self.assertEqual(mock_get_user_by_username.call_count, 1)
+ self.assertEqual(mock_create_user.call_count, 1)
+ self.assertEqual(mock_update_user_groups_membership.call_count, 1)
+ self.assertEqual(mock_get_user_groups.call_count, 1)
+ self.assertEqual(mock_update_user.call_count, 0)
+ self.assertEqual(mock_delete_user.call_count, 0)
+
+ # Verify that the module's changed status matches what is expected
+ self.assertIs(exec_info.exception.args[0]['changed'], changed)
+
+ def test_add_existing_user_no_change(self):
+ """Add a user that already exists (no change expected)"""
+
+ module_args = {
+ 'auth_keycloak_url': 'https://auth.example.com/auth',
+ 'token': '{{ access_token }}',
+ 'state': 'present',
+ 'realm': 'master',
+ 'username': 'test',
+ 'groups': []
+ }
+ return_value_get_user_by_username = [
+ {
+ 'id': '123eqwdawer24qwdqw4',
+ 'username': 'test',
+ 'groups': [],
+ 'enabled': True,
+ 'emailVerified': False,
+ 'disableableCredentialTypes': [],
+ 'requiredActions': [],
+ 'credentials': [],
+ 'federatedIdentities': [],
+ 'clientConsents': []
+ }
+ ]
+ return_value_update_user_groups_membership = [False]
+ return_get_user_groups = [[]]
+ return_create_user = None
+ return_delete_user = None
+ return_update_user = None
+ changed = False
+
+ set_module_args(module_args)
+
+ # Run the module
+
+ with mock_good_connection():
+ with patch_keycloak_api(get_user_by_username=return_value_get_user_by_username,
+ create_user=return_create_user,
+ update_user_groups_membership=return_value_update_user_groups_membership,
+ get_user_groups=return_get_user_groups,
+ update_user=return_update_user,
+ delete_user=return_delete_user) \
+ as (mock_get_user_by_username,
+ mock_create_user,
+ mock_update_user_groups_membership,
+ mock_get_user_groups,
+ mock_delete_user,
+ mock_update_user):
+ with self.assertRaises(AnsibleExitJson) as exec_info:
+ self.module.main()
+
+ self.assertEqual(mock_get_user_by_username.call_count, 1)
+ self.assertEqual(mock_create_user.call_count, 0)
+ self.assertEqual(mock_update_user_groups_membership.call_count, 1)
+ self.assertEqual(mock_get_user_groups.call_count, 1)
+ self.assertEqual(mock_update_user.call_count, 0)
+ self.assertEqual(mock_delete_user.call_count, 0)
+
+ # Verify that the module's changed status matches what is expected
+ self.assertIs(exec_info.exception.args[0]['changed'], changed)
+
+ def test_update_user_with_group_changes(self):
+ """Update groups for a user"""
+
+ module_args = {
+ 'auth_keycloak_url': 'https://auth.example.com/auth',
+ 'token': '{{ access_token }}',
+ 'state': 'present',
+ 'realm': 'master',
+ 'username': 'test',
+ 'first_name': 'test',
+ 'last_name': 'user',
+ 'groups': [{
+ 'name': 'group1',
+ 'state': 'present'
+ }]
+ }
+ return_value_get_user_by_username = [
+ {
+ 'id': '123eqwdawer24qwdqw4',
+ 'username': 'test',
+ 'groups': [],
+ 'enabled': True,
+ 'emailVerified': False,
+ 'disableableCredentialTypes': [],
+ 'requiredActions': [],
+ 'credentials': [],
+ 'federatedIdentities': [],
+ 'clientConsents': []
+ }
+ ]
+ return_value_update_user_groups_membership = [True]
+ return_get_user_groups = [['group1']]
+ return_create_user = None
+ return_delete_user = None
+ return_update_user = [
+ {
+ 'id': '123eqwdawer24qwdqw4',
+ 'username': 'test',
+ 'first_name': 'test',
+ 'last_name': 'user',
+ 'enabled': True,
+ 'emailVerified': False,
+ 'disableableCredentialTypes': [],
+ 'requiredActions': [],
+ 'credentials': [],
+ 'federatedIdentities': [],
+ 'clientConsents': []
+ }
+ ]
+ changed = True
+
+ set_module_args(module_args)
+
+ # Run the module
+
+ with mock_good_connection():
+ with patch_keycloak_api(get_user_by_username=return_value_get_user_by_username,
+ create_user=return_create_user,
+ update_user_groups_membership=return_value_update_user_groups_membership,
+ get_user_groups=return_get_user_groups,
+ update_user=return_update_user,
+ delete_user=return_delete_user) \
+ as (mock_get_user_by_username,
+ mock_create_user,
+ mock_update_user_groups_membership,
+ mock_get_user_groups,
+ mock_delete_user,
+ mock_update_user):
+ with self.assertRaises(AnsibleExitJson) as exec_info:
+ self.module.main()
+
+ self.assertEqual(mock_get_user_by_username.call_count, 1)
+ self.assertEqual(mock_create_user.call_count, 0)
+ self.assertEqual(mock_update_user_groups_membership.call_count, 1)
+ self.assertEqual(mock_get_user_groups.call_count, 1)
+ self.assertEqual(mock_update_user.call_count, 1)
+ self.assertEqual(mock_delete_user.call_count, 0)
+
+ # Verify that the module's changed status matches what is expected
+ self.assertIs(exec_info.exception.args[0]['changed'], changed)
+
+ def test_delete_user(self):
+ """Delete a user"""
+
+ module_args = {
+ 'auth_keycloak_url': 'https://auth.example.com/auth',
+ 'token': '{{ access_token }}',
+ 'state': 'absent',
+ 'realm': 'master',
+ 'username': 'test',
+ 'groups': []
+ }
+ return_value_get_user_by_username = [
+ {
+ 'id': '123eqwdawer24qwdqw4',
+ 'username': 'test',
+ 'groups': [],
+ 'enabled': True,
+ 'emailVerified': False,
+ 'disableableCredentialTypes': [],
+ 'requiredActions': [],
+ 'credentials': [],
+ 'federatedIdentities': [],
+ 'clientConsents': []
+ }
+ ]
+ return_value_update_user_groups_membership = None
+ return_get_user_groups = None
+ return_create_user = None
+ return_delete_user = None
+ return_update_user = None
+ changed = True
+
+ set_module_args(module_args)
+
+ # Run the module
+
+ with mock_good_connection():
+ with patch_keycloak_api(get_user_by_username=return_value_get_user_by_username,
+ create_user=return_create_user,
+ update_user_groups_membership=return_value_update_user_groups_membership,
+ get_user_groups=return_get_user_groups,
+ update_user=return_update_user,
+ delete_user=return_delete_user) \
+ as (mock_get_user_by_username,
+ mock_create_user,
+ mock_update_user_groups_membership,
+ mock_get_user_groups,
+ mock_delete_user,
+ mock_update_user):
+ with self.assertRaises(AnsibleExitJson) as exec_info:
+ self.module.main()
+
+ self.assertEqual(mock_get_user_by_username.call_count, 1)
+ self.assertEqual(mock_create_user.call_count, 0)
+ self.assertEqual(mock_update_user_groups_membership.call_count, 0)
+ self.assertEqual(mock_get_user_groups.call_count, 0)
+ self.assertEqual(mock_update_user.call_count, 0)
+ self.assertEqual(mock_delete_user.call_count, 1)
+
+ # Verify that the module's changed status matches what is expected
+ self.assertIs(exec_info.exception.args[0]['changed'], changed)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_user_federation.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_user_federation.py
index 8d3dcaa23..523ef9f21 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_user_federation.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_keycloak_user_federation.py
@@ -326,6 +326,7 @@ class TestKeycloakUserFederation(ModuleTestCase):
'connectionPooling': True,
'pagination': True,
'allowKerberosAuthentication': False,
+ 'krbPrincipalAttribute': 'krbPrincipalName',
'debug': False,
'useKerberosForPasswordAuthentication': False,
},
@@ -374,6 +375,9 @@ class TestKeycloakUserFederation(ModuleTestCase):
"enabled": [
"true"
],
+ "krbPrincipalAttribute": [
+ "krb5PrincipalName"
+ ],
"usernameLDAPAttribute": [
"uid"
],
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_lvg_rename.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_lvg_rename.py
new file mode 100644
index 000000000..0f2fcb7fa
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_lvg_rename.py
@@ -0,0 +1,160 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Contributors to the Ansible project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.community.general.plugins.modules import lvg_rename
+from ansible_collections.community.general.tests.unit.compat.mock import patch
+from ansible_collections.community.general.tests.unit.plugins.modules.utils import (
+ AnsibleFailJson, AnsibleExitJson, ModuleTestCase, set_module_args)
+
+
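+# Canned 'vgs' output consumed by the mocked run_command: one "<VG name>;<VG UUID>" pair per line.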
+VGS_OUTPUT = '''\
+vg_data_testhost1;XKZ5gn-YhWY-NlrT-QCFN-qmMG-VGT9-7uOmex
+vg_sys_testhost2;xgy2SJ-YlYd-fde2-e3oG-zdXL-0xGf-ihqG2H
+'''
+
+
+class TestLvgRename(ModuleTestCase):
+ """Tests for lvg_rename internals"""
+ module = lvg_rename
+ module_path = 'ansible_collections.community.general.plugins.modules.lvg_rename'
+
+ def setUp(self):
+ """Prepare mocks for module testing"""
+ super(TestLvgRename, self).setUp()
+
+ self.mock_run_responses = {}
+
+ patched_module_get_bin_path = patch('%s.AnsibleModule.get_bin_path' % (self.module_path))
+ self.mock_module_get_bin_path = patched_module_get_bin_path.start()
+ self.mock_module_get_bin_path.return_value = '/mocpath'
+ self.addCleanup(patched_module_get_bin_path.stop)
+
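+ # run_command has no default return value; each test supplies canned (rc, stdout, stderr) tuples via side_effect.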
+ patched_module_run_command = patch('%s.AnsibleModule.run_command' % (self.module_path))
+ self.mock_module_run_command = patched_module_run_command.start()
+ self.addCleanup(patched_module_run_command.stop)
+
+ def test_vg_not_found_by_name(self):
+ """When the VG by the specified by vg name not found, the module should exit with error"""
+ failed = True
+ self.mock_module_run_command.side_effect = [(0, VGS_OUTPUT, '')]
+ expected_msg = 'Both current (vg_missing) and new (vg_data_testhost2) VG are missing.'
+
+ module_args = {
+ 'vg': 'vg_missing',
+ 'vg_new': 'vg_data_testhost2',
+ }
+ set_module_args(args=module_args)
+
+ with self.assertRaises(AnsibleFailJson) as result:
+ self.module.main()
+
+ self.assertEqual(len(self.mock_module_run_command.mock_calls), 1)
+ self.assertIs(result.exception.args[0]['failed'], failed)
+ self.assertEqual(result.exception.args[0]['msg'], expected_msg)
+
+ def test_vg_not_found_by_uuid(self):
+ """When the VG by the specified vg UUID not found, the module should exit with error"""
+ failed = True
+ self.mock_module_run_command.side_effect = [(0, VGS_OUTPUT, '')]
+ expected_msg = 'Both current (Yfj4YG-c8nI-z7w5-B7Fw-i2eM-HqlF-ApFVp0) and new (vg_data_testhost2) VG are missing.'
+
+ module_args = {
+ 'vg': 'Yfj4YG-c8nI-z7w5-B7Fw-i2eM-HqlF-ApFVp0',
+ 'vg_new': 'vg_data_testhost2',
+ }
+ set_module_args(args=module_args)
+
+ with self.assertRaises(AnsibleFailJson) as result:
+ self.module.main()
+
+ self.assertEqual(len(self.mock_module_run_command.mock_calls), 1)
+ self.assertIs(result.exception.args[0]['failed'], failed)
+ self.assertEqual(result.exception.args[0]['msg'], expected_msg)
+
+ def test_vg_and_vg_new_both_exists(self):
+ """When a VG found for both vg and vg_new options, the module should exit with error"""
+ failed = True
+ self.mock_module_run_command.side_effect = [(0, VGS_OUTPUT, '')]
+ expected_msg = 'The new VG name (vg_sys_testhost2) is already in use.'
+
+ module_args = {
+ 'vg': 'vg_data_testhost1',
+ 'vg_new': 'vg_sys_testhost2',
+ }
+ set_module_args(args=module_args)
+
+ with self.assertRaises(AnsibleFailJson) as result:
+ self.module.main()
+
+ self.assertEqual(len(self.mock_module_run_command.mock_calls), 1)
+ self.assertIs(result.exception.args[0]['failed'], failed)
+ self.assertEqual(result.exception.args[0]['msg'], expected_msg)
+
+ def test_vg_needs_renaming(self):
+ """When the VG found for vg option and there is no VG for vg_new option,
+ the module should call vgrename"""
+ changed = True
+ self.mock_module_run_command.side_effect = [
+ (0, VGS_OUTPUT, ''),
+ (0, ' Volume group "vg_data_testhost1" successfully renamed to "vg_data_testhost2"', '')
+ ]
+ expected_msg = ' Volume group "vg_data_testhost1" successfully renamed to "vg_data_testhost2"'
+
+ module_args = {
+ 'vg': '/dev/vg_data_testhost1',
+ 'vg_new': 'vg_data_testhost2',
+ }
+ set_module_args(args=module_args)
+
+ with self.assertRaises(AnsibleExitJson) as result:
+ self.module.main()
+
+ self.assertEqual(len(self.mock_module_run_command.mock_calls), 2)
+ self.assertIs(result.exception.args[0]['changed'], changed)
+ self.assertEqual(result.exception.args[0]['msg'], expected_msg)
+
+ def test_vg_needs_renaming_in_check_mode(self):
+ """When running in check mode and the VG found for vg option and there is no VG for vg_new option,
+ the module should not call vgrename"""
+ changed = True
+ self.mock_module_run_command.side_effect = [(0, VGS_OUTPUT, '')]
+ expected_msg = 'Running in check mode. The module would rename VG /dev/vg_data_testhost1 to vg_data_testhost2.'
+
+ module_args = {
+ 'vg': '/dev/vg_data_testhost1',
+ 'vg_new': 'vg_data_testhost2',
+ '_ansible_check_mode': True,
+ }
+ set_module_args(args=module_args)
+
+ with self.assertRaises(AnsibleExitJson) as result:
+ self.module.main()
+
+ self.assertEqual(len(self.mock_module_run_command.mock_calls), 1)
+ self.assertIs(result.exception.args[0]['changed'], changed)
+ self.assertEqual(result.exception.args[0]['msg'], expected_msg)
+
+ def test_vg_needs_no_renaming(self):
+ """When the VG not found for vg option and the VG found for vg_new option,
+ the module should not call vgrename"""
+ changed = False
+ self.mock_module_run_command.side_effect = [(0, VGS_OUTPUT, '')]
+ expected_msg = 'The new VG (vg_data_testhost1) already exists, nothing to do.'
+
+ module_args = {
+ 'vg': 'vg_data_testhostX',
+ 'vg_new': 'vg_data_testhost1',
+ }
+ set_module_args(args=module_args)
+
+ with self.assertRaises(AnsibleExitJson) as result:
+ self.module.main()
+
+ self.assertEqual(len(self.mock_module_run_command.mock_calls), 1)
+ self.assertIs(result.exception.args[0]['changed'], changed)
+ self.assertEqual(result.exception.args[0]['msg'], expected_msg)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_modprobe.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_modprobe.py
index 18695695a..2ad083151 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_modprobe.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_modprobe.py
@@ -152,7 +152,7 @@ class TestUnloadModule(ModuleTestCase):
class TestModuleIsLoadedPersistently(ModuleTestCase):
def setUp(self):
if (sys.version_info[0] == 3 and sys.version_info[1] < 7) or (sys.version_info[0] == 2 and sys.version_info[1] < 7):
- self.skipTest('open_mock doesnt support readline in earlier python versions')
+ self.skipTest("open_mock doesn't support readline in earlier python versions")
super(TestModuleIsLoadedPersistently, self).setUp()
@@ -230,7 +230,7 @@ class TestModuleIsLoadedPersistently(ModuleTestCase):
class TestPermanentParams(ModuleTestCase):
def setUp(self):
if (sys.version_info[0] == 3 and sys.version_info[1] < 7) or (sys.version_info[0] == 2 and sys.version_info[1] < 7):
- self.skipTest('open_mock doesnt support readline in earlier python versions')
+ self.skipTest("open_mock doesn't support readline in earlier python versions")
super(TestPermanentParams, self).setUp()
self.mock_get_bin_path = patch('ansible.module_utils.basic.AnsibleModule.get_bin_path')
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_nmcli.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_nmcli.py
index efd8284a3..8c9c007ac 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_nmcli.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_nmcli.py
@@ -118,6 +118,12 @@ TESTCASE_CONNECTION = [
'state': 'absent',
'_ansible_check_mode': True,
},
+ {
+ 'type': 'loopback',
+ 'conn_name': 'non_existent_nw_device',
+ 'state': 'absent',
+ '_ansible_check_mode': True,
+ },
]
TESTCASE_GENERIC = [
@@ -262,6 +268,25 @@ ipv4.routes: { ip = 192.168.200.0/24, nh = 192.168.1.
ipv4.route-metric: 10
"""
+TESTCASE_ETHERNET_MOD_IPV4_INT_WITH_ROUTE_AND_METRIC_CLEAR = [
+ {
+ 'type': 'ethernet',
+ 'conn_name': 'non_existent_nw_device',
+ 'routes4': [],
+ 'state': 'present',
+ '_ansible_check_mode': False,
+ '_ansible_diff': True,
+ },
+ {
+ 'type': 'ethernet',
+ 'conn_name': 'non_existent_nw_device',
+ 'routes4_extended': [],
+ 'state': 'present',
+ '_ansible_check_mode': False,
+ '_ansible_diff': True,
+ },
+]
+
TESTCASE_ETHERNET_MOD_IPV6_INT_WITH_ROUTE_AND_METRIC = [
{
'type': 'ethernet',
@@ -453,6 +478,38 @@ ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
"""
+TESTCASE_GENERIC_DNS4_OPTIONS = [
+ {
+ 'type': 'generic',
+ 'conn_name': 'non_existent_nw_device',
+ 'ifname': 'generic_non_existant',
+ 'ip4': '10.10.10.10/24',
+ 'gw4': '10.10.10.1',
+ 'state': 'present',
+ 'dns4_options': [],
+ 'dns6_options': [],
+ '_ansible_check_mode': False,
+ }
+]
+
+TESTCASE_GENERIC_DNS4_OPTIONS_SHOW_OUTPUT = """\
+connection.id: non_existent_nw_device
+connection.interface-name: generic_non_existant
+connection.autoconnect: yes
+ipv4.method: manual
+ipv4.addresses: 10.10.10.10/24
+ipv4.gateway: 10.10.10.1
+ipv4.ignore-auto-dns: no
+ipv4.ignore-auto-routes: no
+ipv4.never-default: no
+ipv4.dns-options: --
+ipv4.may-fail: yes
+ipv6.dns-options: --
+ipv6.method: auto
+ipv6.ignore-auto-dns: no
+ipv6.ignore-auto-routes: no
+"""
+
TESTCASE_GENERIC_ZONE = [
{
'type': 'generic',
@@ -569,6 +626,7 @@ TESTCASE_BRIDGE_SLAVE = [
'type': 'bridge-slave',
'conn_name': 'non_existent_nw_device',
'ifname': 'br0_non_existant',
+ 'hairpin': True,
'path_cost': 100,
'state': 'present',
'_ansible_check_mode': False,
@@ -892,6 +950,28 @@ TESTCASE_ETHERNET_STATIC = [
}
]
+TESTCASE_LOOPBACK = [
+ {
+ 'type': 'loopback',
+ 'conn_name': 'lo',
+ 'ifname': 'lo',
+ 'ip4': '127.0.0.1/8',
+ 'state': 'present',
+ '_ansible_check_mode': False,
+ }
+]
+
+TESTCASE_LOOPBACK_MODIFY = [
+ {
+ 'type': 'loopback',
+ 'conn_name': 'lo',
+ 'ifname': 'lo',
+ 'ip4': ['127.0.0.1/8', '127.0.0.2/8'],
+ 'state': 'present',
+ '_ansible_check_mode': False,
+ }
+]
+
TESTCASE_ETHERNET_STATIC_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: ethernet_non_existant
@@ -910,6 +990,21 @@ ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
"""
+TESTCASE_LOOPBACK_SHOW_OUTPUT = """\
+connection.id: lo
+connection.interface-name: lo
+connection.autoconnect: yes
+ipv4.method: manual
+ipv4.addresses: 127.0.0.1/8
+ipv4.ignore-auto-dns: no
+ipv4.ignore-auto-routes: no
+ipv4.never-default: no
+ipv4.may-fail: yes
+ipv6.method: manual
+ipv6.ignore-auto-dns: no
+ipv6.ignore-auto-routes: no
+"""
+
TESTCASE_ETHERNET_STATIC_MULTIPLE_IP4_ADDRESSES = [
{
'type': 'ethernet',
@@ -1393,7 +1488,8 @@ ipv4.may-fail: yes
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
-infiniband.transport-mode datagram
+infiniband.mtu: auto
+infiniband.transport-mode: datagram
"""
TESTCASE_INFINIBAND_STATIC_MODIFY_TRANSPORT_MODE = [
@@ -1514,6 +1610,13 @@ def mocked_generic_connection_dns_search_unchanged(mocker):
@pytest.fixture
+def mocked_generic_connection_dns_options_unchanged(mocker):
+ mocker_set(mocker,
+ connection_exists=True,
+ execute_return=(0, TESTCASE_GENERIC_DNS4_OPTIONS_SHOW_OUTPUT, ""))
+
+
+@pytest.fixture
def mocked_generic_connection_zone_unchanged(mocker):
mocker_set(mocker,
connection_exists=True,
@@ -1672,6 +1775,17 @@ def mocked_ethernet_connection_with_ipv4_static_address_static_route_metric_modi
@pytest.fixture
+def mocked_ethernet_connection_with_ipv4_static_address_static_route_metric_clear(mocker):
+ mocker_set(mocker,
+ connection_exists=True,
+ execute_return=None,
+ execute_side_effect=(
+ (0, TESTCASE_ETHERNET_MOD_IPV4_INT_WITH_ROUTE_AND_METRIC_SHOW_OUTPUT, ""),
+ (0, "", ""),
+ ))
+
+
+@pytest.fixture
def mocked_ethernet_connection_with_ipv6_static_address_static_route_metric_modify(mocker):
mocker_set(mocker,
connection_exists=True,
@@ -1875,6 +1989,24 @@ def mocked_generic_connection_diff_check(mocker):
execute_return=(0, TESTCASE_GENERIC_SHOW_OUTPUT, ""))
+@pytest.fixture
+def mocked_loopback_connection_unchanged(mocker):
+ mocker_set(mocker,
+ connection_exists=True,
+ execute_return=(0, TESTCASE_LOOPBACK_SHOW_OUTPUT, ""))
+
+
+@pytest.fixture
+def mocked_loopback_connection_modify(mocker):
+ mocker_set(mocker,
+ connection_exists=True,
+ execute_return=None,
+ execute_side_effect=(
+ (0, TESTCASE_LOOPBACK_SHOW_OUTPUT, ""),
+ (0, "", ""),
+ ))
+
+
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_BOND, indirect=['patch_ansible_module'])
def test_bond_connection_create(mocked_generic_connection_create, capfd):
"""
@@ -2066,6 +2198,62 @@ def test_generic_connection_dns_search_unchanged(mocked_generic_connection_dns_s
assert not results['changed']
+@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GENERIC_DNS4_OPTIONS, indirect=['patch_ansible_module'])
+def test_generic_connection_create_dns_options(mocked_generic_connection_create, capfd):
+ """
+ Test : Generic connection created with dns options
+ """
+ with pytest.raises(SystemExit):
+ nmcli.main()
+
+ assert nmcli.Nmcli.execute_command.call_count == 1
+ arg_list = nmcli.Nmcli.execute_command.call_args_list
+ args, kwargs = arg_list[0]
+
+ assert 'ipv4.dns-options' in args[0]
+ assert 'ipv6.dns-options' in args[0]
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+ assert not results.get('failed')
+ assert results['changed']
+
+
+@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GENERIC_DNS4_OPTIONS, indirect=['patch_ansible_module'])
+def test_generic_connection_modify_dns_options(mocked_generic_connection_create, capfd):
+ """
+ Test : Generic connection modified with dns options
+ """
+ with pytest.raises(SystemExit):
+ nmcli.main()
+
+ assert nmcli.Nmcli.execute_command.call_count == 1
+ arg_list = nmcli.Nmcli.execute_command.call_args_list
+ args, kwargs = arg_list[0]
+
+ assert 'ipv4.dns-options' in args[0]
+ assert 'ipv6.dns-options' in args[0]
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+ assert not results.get('failed')
+ assert results['changed']
+
+
+@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GENERIC_DNS4_OPTIONS, indirect=['patch_ansible_module'])
+def test_generic_connection_dns_options_unchanged(mocked_generic_connection_dns_options_unchanged, capfd):
+ """
+ Test : Generic connection with dns options unchanged
+ """
+ with pytest.raises(SystemExit):
+ nmcli.main()
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+ assert not results.get('failed')
+ assert not results['changed']
+
+
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_CONNECTION, indirect=['patch_ansible_module'])
def test_dns4_none(mocked_connection_exists, capfd):
"""
@@ -2991,6 +3179,38 @@ def test_ethernet_connection_static_ipv4_address_static_route_with_metric_modify
assert not results.get('failed')
+@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_MOD_IPV4_INT_WITH_ROUTE_AND_METRIC_CLEAR, indirect=['patch_ansible_module'])
+def test_ethernet_connection_static_ipv4_address_static_route_with_metric_clear(
+ mocked_ethernet_connection_with_ipv4_static_address_static_route_metric_clear, capfd):
+ """
+ Test : Modify ethernet connection with static IPv4 address, clearing its static routes
+ """
+ with pytest.raises(SystemExit):
+ nmcli.main()
+
+ arg_list = nmcli.Nmcli.execute_command.call_args_list
+ add_args, add_kw = arg_list[1]
+
+ assert add_args[0][0] == '/usr/bin/nmcli'
+ assert add_args[0][1] == 'con'
+ assert add_args[0][2] == 'modify'
+ assert add_args[0][3] == 'non_existent_nw_device'
+
+ add_args_text = list(map(to_text, add_args[0]))
+
+ for param in ['ipv4.routes', '']:
+ assert param in add_args_text
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+
+ assert 'ipv4.routes' in results['diff']['before']
+ assert 'ipv4.routes' in results['diff']['after']
+
+ assert results.get('changed') is True
+ assert not results.get('failed')
+
+
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_ADD_IPV6_INT_WITH_ROUTE, indirect=['patch_ansible_module'])
def test_ethernet_connection_static_ipv6_address_static_route_create(mocked_ethernet_connection_with_ipv6_static_address_static_route_create, capfd):
"""
@@ -4032,6 +4252,7 @@ def test_bond_connection_unchanged(mocked_generic_connection_diff_check, capfd):
state=dict(type='str', required=True, choices=['absent', 'present']),
conn_name=dict(type='str', required=True),
master=dict(type='str'),
+ slave_type=dict(type=str, choices=['bond', 'bridge', 'team']),
ifname=dict(type='str'),
type=dict(type='str',
choices=[
@@ -4077,6 +4298,7 @@ def test_bond_connection_unchanged(mocked_generic_connection_diff_check, capfd):
never_default4=dict(type='bool', default=False),
dns4=dict(type='list', elements='str'),
dns4_search=dict(type='list', elements='str'),
+ dns4_options=dict(type='list', elements='str'),
dns4_ignore_auto=dict(type='bool', default=False),
method4=dict(type='str', choices=['auto', 'link-local', 'manual', 'shared', 'disabled']),
may_fail4=dict(type='bool', default=True),
@@ -4086,6 +4308,7 @@ def test_bond_connection_unchanged(mocked_generic_connection_diff_check, capfd):
gw6_ignore_auto=dict(type='bool', default=False),
dns6=dict(type='list', elements='str'),
dns6_search=dict(type='list', elements='str'),
+ dns6_options=dict(type='list', elements='str'),
dns6_ignore_auto=dict(type='bool', default=False),
routes6=dict(type='list', elements='str'),
routes6_extended=dict(type='list',
@@ -4259,3 +4482,366 @@ def test_macvlan_mod(mocked_generic_connection_modify, capfd):
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
+
+
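+# Test data for attaching an ethernet connection to a master via slave_type; bridge, bond and team variants follow.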
+TESTCASE_SLAVE_TYPE_BRIDGE_CONNECTION = [
+ {
+ 'type': 'ethernet',
+ 'conn_name': 'fake_conn',
+ 'ifname': 'fake_eth0',
+ 'state': 'present',
+ 'slave_type': 'bridge',
+ 'master': 'fake_br0',
+ '_ansible_check_mode': False,
+ }
+]
+
+
+TESTCASE_SLAVE_TYPE_BRIDGE_CONNECTION_SHOW_OUTPUT = """\
+connection.id: fake_conn
+connection.type: 802-3-ethernet
+connection.interface-name: fake_eth0
+connection.autoconnect: yes
+connection.master: --
+connection.slave-type: --
+802-3-ethernet.mtu: auto
+"""
+
+
+TESTCASE_SLAVE_TYPE_BRIDGE_CONNECTION_UNCHANGED_SHOW_OUTPUT = """\
+connection.id: fake_conn
+connection.type: 802-3-ethernet
+connection.interface-name: fake_eth0
+connection.autoconnect: yes
+connection.master: fake_br0
+connection.slave-type: bridge
+802-3-ethernet.mtu: auto
+"""
+
+
+@pytest.fixture
+def mocked_slave_type_bridge_create(mocker):
+ mocker_set(mocker,
+ execute_return=None,
+ execute_side_effect=(
+ (0, TESTCASE_SLAVE_TYPE_BRIDGE_CONNECTION_SHOW_OUTPUT, ""),
+ (0, "", ""),
+ ))
+
+
+@pytest.mark.parametrize('patch_ansible_module', TESTCASE_SLAVE_TYPE_BRIDGE_CONNECTION, indirect=['patch_ansible_module'])
+def test_create_slave_type_bridge(mocked_slave_type_bridge_create, capfd):
+ """
+ Test : slave for bridge created
+ """
+
+ with pytest.raises(SystemExit):
+ nmcli.main()
+
+ assert nmcli.Nmcli.execute_command.call_count == 1
+ arg_list = nmcli.Nmcli.execute_command.call_args_list
+ args, kwargs = arg_list[0]
+
+ assert args[0][0] == '/usr/bin/nmcli'
+ assert args[0][1] == 'con'
+ assert args[0][2] == 'add'
+ assert args[0][3] == 'type'
+ assert args[0][4] == 'ethernet'
+ assert args[0][5] == 'con-name'
+ assert args[0][6] == 'fake_conn'
+ con_master_index = args[0].index('connection.master')
+ slave_type_index = args[0].index('connection.slave-type')
+ assert args[0][con_master_index + 1] == 'fake_br0'
+ assert args[0][slave_type_index + 1] == 'bridge'
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+ assert not results.get('failed')
+ assert results['changed']
+
+
+@pytest.fixture
+def mocked_create_slave_type_bridge_unchanged(mocker):
+ mocker_set(mocker,
+ connection_exists=True,
+ execute_return=(0, TESTCASE_SLAVE_TYPE_BRIDGE_CONNECTION_UNCHANGED_SHOW_OUTPUT, ""))
+
+
+@pytest.mark.parametrize('patch_ansible_module', TESTCASE_SLAVE_TYPE_BRIDGE_CONNECTION, indirect=['patch_ansible_module'])
+def test_slave_type_bridge_unchanged(mocked_create_slave_type_bridge_unchanged, capfd):
+ """
+ Test : Existing slave for bridge unchanged
+ """
+ with pytest.raises(SystemExit):
+ nmcli.main()
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+ assert not results.get('failed')
+ assert not results['changed']
+
+
+TESTCASE_SLAVE_TYPE_BOND_CONNECTION = [
+ {
+ 'type': 'ethernet',
+ 'conn_name': 'fake_conn',
+ 'ifname': 'fake_eth0',
+ 'state': 'present',
+ 'slave_type': 'bond',
+ 'master': 'fake_bond0',
+ '_ansible_check_mode': False,
+ }
+]
+
+
+TESTCASE_SLAVE_TYPE_BOND_CONNECTION_SHOW_OUTPUT = """\
+connection.id: fake_conn
+connection.type: 802-3-ethernet
+connection.interface-name: fake_eth0
+connection.autoconnect: yes
+connection.master: --
+connection.slave-type: --
+802-3-ethernet.mtu: auto
+"""
+
+
+TESTCASE_SLAVE_TYPE_BOND_CONNECTION_UNCHANGED_SHOW_OUTPUT = """\
+connection.id: fake_conn
+connection.type: 802-3-ethernet
+connection.interface-name: fake_eth0
+connection.autoconnect: yes
+connection.master: fake_bond0
+connection.slave-type: bond
+802-3-ethernet.mtu: auto
+"""
+
+
+@pytest.fixture
+def mocked_slave_type_bond_create(mocker):
+ mocker_set(mocker,
+ execute_return=None,
+ execute_side_effect=(
+ (0, TESTCASE_SLAVE_TYPE_BOND_CONNECTION_SHOW_OUTPUT, ""),
+ (0, "", ""),
+ ))
+
+
+@pytest.mark.parametrize('patch_ansible_module', TESTCASE_SLAVE_TYPE_BOND_CONNECTION, indirect=['patch_ansible_module'])
+def test_create_slave_type_bond(mocked_slave_type_bond_create, capfd):
+ """
+ Test : slave for bond created
+ """
+
+ with pytest.raises(SystemExit):
+ nmcli.main()
+
+ assert nmcli.Nmcli.execute_command.call_count == 1
+ arg_list = nmcli.Nmcli.execute_command.call_args_list
+ args, kwargs = arg_list[0]
+
+ assert args[0][0] == '/usr/bin/nmcli'
+ assert args[0][1] == 'con'
+ assert args[0][2] == 'add'
+ assert args[0][3] == 'type'
+ assert args[0][4] == 'ethernet'
+ assert args[0][5] == 'con-name'
+ assert args[0][6] == 'fake_conn'
+ con_master_index = args[0].index('connection.master')
+ slave_type_index = args[0].index('connection.slave-type')
+ assert args[0][con_master_index + 1] == 'fake_bond0'
+ assert args[0][slave_type_index + 1] == 'bond'
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+ assert not results.get('failed')
+ assert results['changed']
+
+
+@pytest.fixture
+def mocked_create_slave_type_bond_unchanged(mocker):
+ mocker_set(mocker,
+ connection_exists=True,
+ execute_return=(0, TESTCASE_SLAVE_TYPE_BOND_CONNECTION_UNCHANGED_SHOW_OUTPUT, ""))
+
+
+@pytest.mark.parametrize('patch_ansible_module', TESTCASE_SLAVE_TYPE_BOND_CONNECTION, indirect=['patch_ansible_module'])
+def test_slave_type_bond_unchanged(mocked_create_slave_type_bond_unchanged, capfd):
+ """
+ Test : Existing slave for bond unchanged
+ """
+ with pytest.raises(SystemExit):
+ nmcli.main()
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+ assert not results.get('failed')
+ assert not results['changed']
+
+
+TESTCASE_SLAVE_TYPE_TEAM_CONNECTION = [
+ {
+ 'type': 'ethernet',
+ 'conn_name': 'fake_conn',
+ 'ifname': 'fake_eth0',
+ 'state': 'present',
+ 'slave_type': 'team',
+ 'master': 'fake_team0',
+ '_ansible_check_mode': False,
+ }
+]
+
+
+TESTCASE_SLAVE_TYPE_TEAM_CONNECTION_SHOW_OUTPUT = """\
+connection.id: fake_conn
+connection.type: 802-3-ethernet
+connection.interface-name: fake_eth0
+connection.autoconnect: yes
+connection.master: --
+connection.slave-type: --
+802-3-ethernet.mtu: auto
+"""
+
+
+TESTCASE_SLAVE_TYPE_TEAM_CONNECTION_UNCHANGED_SHOW_OUTPUT = """\
+connection.id: fake_conn
+connection.type: 802-3-ethernet
+connection.interface-name: fake_eth0
+connection.autoconnect: yes
+connection.master: fake_team0
+connection.slave-type: team
+802-3-ethernet.mtu: auto
+"""
+
+
+@pytest.fixture
+def mocked_slave_type_team_create(mocker):
+ mocker_set(mocker,
+ execute_return=None,
+ execute_side_effect=(
+ (0, TESTCASE_SLAVE_TYPE_TEAM_CONNECTION_SHOW_OUTPUT, ""),
+ (0, "", ""),
+ ))
+
+
+@pytest.mark.parametrize('patch_ansible_module', TESTCASE_SLAVE_TYPE_TEAM_CONNECTION, indirect=['patch_ansible_module'])
+def test_create_slave_type_team(mocked_slave_type_team_create, capfd):
+ """
+ Test : slave for team created
+ """
+
+ with pytest.raises(SystemExit):
+ nmcli.main()
+
+ assert nmcli.Nmcli.execute_command.call_count == 1
+ arg_list = nmcli.Nmcli.execute_command.call_args_list
+ args, kwargs = arg_list[0]
+
+ assert args[0][0] == '/usr/bin/nmcli'
+ assert args[0][1] == 'con'
+ assert args[0][2] == 'add'
+ assert args[0][3] == 'type'
+ assert args[0][4] == 'ethernet'
+ assert args[0][5] == 'con-name'
+ assert args[0][6] == 'fake_conn'
+ con_master_index = args[0].index('connection.master')
+ slave_type_index = args[0].index('connection.slave-type')
+ assert args[0][con_master_index + 1] == 'fake_team0'
+ assert args[0][slave_type_index + 1] == 'team'
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+ assert not results.get('failed')
+ assert results['changed']
+
+
+@pytest.fixture
+def mocked_create_slave_type_team_unchanged(mocker):
+ mocker_set(mocker,
+ connection_exists=True,
+ execute_return=(0, TESTCASE_SLAVE_TYPE_TEAM_CONNECTION_UNCHANGED_SHOW_OUTPUT, ""))
+
+
+@pytest.mark.parametrize('patch_ansible_module', TESTCASE_SLAVE_TYPE_TEAM_CONNECTION, indirect=['patch_ansible_module'])
+def test_slave_type_team_unchanged(mocked_create_slave_type_team_unchanged, capfd):
+ """
+ Test : Existing slave for team unchanged
+ """
+ with pytest.raises(SystemExit):
+ nmcli.main()
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+ assert not results.get('failed')
+ assert not results['changed']
+
+
+@pytest.mark.parametrize('patch_ansible_module', TESTCASE_LOOPBACK, indirect=['patch_ansible_module'])
+def test_create_loopback(mocked_generic_connection_create, capfd):
+ """
+ Test : Create loopback connection
+ """
+
+ with pytest.raises(SystemExit):
+ nmcli.main()
+
+ assert nmcli.Nmcli.execute_command.call_count == 1
+ arg_list = nmcli.Nmcli.execute_command.call_args_list
+ add_args, add_kw = arg_list[0]
+
+ assert add_args[0][0] == '/usr/bin/nmcli'
+ assert add_args[0][1] == 'con'
+ assert add_args[0][2] == 'add'
+ assert add_args[0][3] == 'type'
+ assert add_args[0][4] == 'loopback'
+ assert add_args[0][5] == 'con-name'
+ assert add_args[0][6] == 'lo'
+
+ add_args_text = list(map(to_text, add_args[0]))
+ for param in ['connection.interface-name', 'lo',
+ 'ipv4.addresses', '127.0.0.1/8']:
+ assert param in add_args_text
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+ assert not results.get('failed')
+ assert results['changed']
+
+
+@pytest.mark.parametrize('patch_ansible_module', TESTCASE_LOOPBACK, indirect=['patch_ansible_module'])
+def test_unchanged_loopback(mocked_loopback_connection_unchanged, capfd):
+ """
+ Test : loopback connection unchanged
+ """
+ with pytest.raises(SystemExit):
+ nmcli.main()
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+ assert not results.get('failed')
+ assert not results['changed']
+
+
+@pytest.mark.parametrize('patch_ansible_module', TESTCASE_LOOPBACK_MODIFY, indirect=['patch_ansible_module'])
+def test_add_second_ip4_address_to_loopback_connection(mocked_loopback_connection_modify, capfd):
+ """
+ Test : Modify loopback connection
+ """
+ with pytest.raises(SystemExit):
+ nmcli.main()
+
+ assert nmcli.Nmcli.execute_command.call_count == 2
+ arg_list = nmcli.Nmcli.execute_command.call_args_list
+ args, kwargs = arg_list[1]
+
+ assert args[0][0] == '/usr/bin/nmcli'
+ assert args[0][1] == 'con'
+ assert args[0][2] == 'modify'
+ assert args[0][3] == 'lo'
+
+ for param in ['ipv4.addresses', '127.0.0.1/8,127.0.0.2/8']:
+ assert param in args[0]
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+ assert not results.get('failed')
+ assert results['changed']
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_nomad_token.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_nomad_token.py
new file mode 100644
index 000000000..48f060f8b
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_nomad_token.py
@@ -0,0 +1,222 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import nomad
+from ansible_collections.community.general.plugins.modules import nomad_token
+from ansible_collections.community.general.tests.unit.compat.mock import patch
+from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleExitJson, AnsibleFailJson, \
+ ModuleTestCase, \
+ set_module_args
+
+
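+# Canned responses mimicking nomad.api.acl.Acl return values, shared by the tests below.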
+def mock_acl_get_tokens(empty_list=False):
+ response_object = []
+
+ if not empty_list:
+ response_object = [
+ {
+ 'AccessorID': 'bac2b162-2a63-efa2-4e68-55d79dcb7721',
+ 'Name': 'Bootstrap Token', 'Type': 'management',
+ 'Policies': None, 'Roles': None, 'Global': True,
+ 'Hash': 'BUJ3BerTfrqFVm1P+vZr1gz9ubOkd+JAvYjNAJyaU9Y=',
+ 'CreateTime': '2023-11-12T18:44:39.740562185Z',
+ 'ExpirationTime': None,
+ 'CreateIndex': 9,
+ 'ModifyIndex': 9
+ },
+ {
+ 'AccessorID': '0d01c55f-8d63-f832-04ff-1866d4eb594e',
+ 'Name': 'devs',
+ 'Type': 'client', 'Policies': ['readonly'],
+ 'Roles': None,
+ 'Global': True,
+ 'Hash': 'eSn8H8RVqh8As8WQNnC2vlBRqXy6DECogc5umzX0P30=',
+ 'CreateTime': '2023-11-12T18:48:34.248857001Z',
+ 'ExpirationTime': None,
+ 'CreateIndex': 14,
+ 'ModifyIndex': 836
+ }
+ ]
+
+ return response_object
+
+
+def mock_acl_generate_bootstrap():
+ response_object = {
+ 'AccessorID': '0d01c55f-8d63-f832-04ff-1866d4eb594e',
+ 'Name': 'Bootstrap Token',
+ 'Type': 'management',
+ 'Policies': None,
+ 'Roles': None,
+ 'Global': True,
+ 'Hash': 'BUJ3BerTfrqFVm1P+vZr1gz9ubOkd+JAvYjNAJyaU9Y=',
+ 'CreateTime': '2023-11-12T18:48:34.248857001Z',
+ 'ExpirationTime': None,
+ 'ExpirationTTL': '',
+ 'CreateIndex': 14,
+ 'ModifyIndex': 836,
+ 'SecretID': 'd539a03d-337a-8504-6d12-000f861337bc'
+ }
+ return response_object
+
+
+def mock_acl_create_update_token():
+ response_object = {
+ 'AccessorID': '0d01c55f-8d63-f832-04ff-1866d4eb594e',
+ 'Name': 'dev',
+ 'Type': 'client',
+ 'Policies': ['readonly'],
+ 'Roles': None,
+ 'Global': True,
+ 'Hash': 'eSn8H8RVqh8As8WQNnC2vlBRqXy6DECogc5umzX0P30=',
+ 'CreateTime': '2023-11-12T18:48:34.248857001Z',
+ 'ExpirationTime': None,
+ 'ExpirationTTL': '',
+ 'CreateIndex': 14,
+ 'ModifyIndex': 836,
+ 'SecretID': 'd539a03d-337a-8504-6d12-000f861337bc'
+ }
+
+ return response_object
+
+
+def mock_acl_delete_token():
+ return {}
+
+
+class TestNomadTokenModule(ModuleTestCase):
+
+ def setUp(self):
+ super(TestNomadTokenModule, self).setUp()
+ self.module = nomad_token
+
+ def tearDown(self):
+ super(TestNomadTokenModule, self).tearDown()
+
+ def test_should_fail_without_parameters(self):
+ with self.assertRaises(AnsibleFailJson):
+ set_module_args({})
+ self.module.main()
+
+ def test_should_create_token_type_client(self):
+ module_args = {
+ 'host': 'localhost',
+ 'name': 'Dev token',
+ 'token_type': 'client',
+ 'state': 'present'
+ }
+
+ set_module_args(module_args)
+ with patch.object(nomad.api.acl.Acl, 'get_tokens', return_value=mock_acl_get_tokens()) as mock_get_tokens:
+ with patch.object(nomad.api.acl.Acl, 'create_token', return_value=mock_acl_create_update_token()) as \
+ mock_create_update_token:
+ with self.assertRaises(AnsibleExitJson):
+ self.module.main()
+
+ self.assertIs(mock_get_tokens.call_count, 1)
+ self.assertIs(mock_create_update_token.call_count, 1)
+
+ def test_should_create_token_type_bootstrap(self):
+ module_args = {
+ 'host': 'localhost',
+ 'token_type': 'bootstrap',
+ 'state': 'present'
+ }
+
+ set_module_args(module_args)
+
+ with patch.object(nomad.api.acl.Acl, 'get_tokens') as mock_get_tokens:
+ with patch.object(nomad.api.Acl, 'generate_bootstrap') as mock_generate_bootstrap:
+ mock_get_tokens.return_value = mock_acl_get_tokens(empty_list=True)
+ mock_generate_bootstrap.return_value = mock_acl_generate_bootstrap()
+
+ with self.assertRaises(AnsibleExitJson):
+ self.module.main()
+
+ self.assertIs(mock_get_tokens.call_count, 1)
+ self.assertIs(mock_generate_bootstrap.call_count, 1)
+
+ def test_should_fail_delete_without_name_parameter(self):
+ module_args = {
+ 'host': 'localhost',
+ 'state': 'absent'
+ }
+
+ set_module_args(module_args)
+ with patch.object(nomad.api.acl.Acl, 'get_tokens') as mock_get_tokens:
+ with patch.object(nomad.api.acl.Acl, 'delete_token') as mock_delete_token:
+ mock_get_tokens.return_value = mock_acl_get_tokens()
+ mock_delete_token.return_value = mock_acl_delete_token()
+
+ with self.assertRaises(AnsibleFailJson):
+ self.module.main()
+
+ def test_should_fail_delete_bootstrap_token(self):
+ module_args = {
+ 'host': 'localhost',
+ 'token_type': 'bootstrap',
+ 'state': 'absent'
+ }
+
+ set_module_args(module_args)
+
+ with self.assertRaises(AnsibleFailJson):
+ self.module.main()
+
+ def test_should_fail_delete_bootstrap_token_by_name(self):
+ module_args = {
+ 'host': 'localhost',
+ 'name': 'Bootstrap Token',
+ 'state': 'absent'
+ }
+
+ set_module_args(module_args)
+
+ with self.assertRaises(AnsibleFailJson):
+ self.module.main()
+
+ def test_should_delete_client_token(self):
+ module_args = {
+ 'host': 'localhost',
+ 'name': 'devs',
+ 'state': 'absent'
+ }
+
+ set_module_args(module_args)
+
+ with patch.object(nomad.api.acl.Acl, 'get_tokens') as mock_get_tokens:
+ with patch.object(nomad.api.acl.Acl, 'delete_token') as mock_delete_token:
+ mock_get_tokens.return_value = mock_acl_get_tokens()
+ mock_delete_token.return_value = mock_acl_delete_token()
+
+ with self.assertRaises(AnsibleExitJson):
+ self.module.main()
+
+ self.assertIs(mock_delete_token.call_count, 1)
+
+ def test_should_update_client_token(self):
+ module_args = {
+ 'host': 'localhost',
+ 'name': 'devs',
+ 'token_type': 'client',
+ 'state': 'present'
+ }
+
+ set_module_args(module_args)
+
+ with patch.object(nomad.api.acl.Acl, 'get_tokens') as mock_get_tokens:
+ with patch.object(nomad.api.acl.Acl, 'update_token') as mock_create_update_token:
+ mock_get_tokens.return_value = mock_acl_get_tokens()
+ mock_create_update_token.return_value = mock_acl_create_update_token()
+
+ with self.assertRaises(AnsibleExitJson):
+ self.module.main()
+ self.assertIs(mock_get_tokens.call_count, 1)
+ self.assertIs(mock_create_update_token.call_count, 1)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_npm.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_npm.py
index f5d312775..cc4d65172 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_npm.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_npm.py
@@ -48,8 +48,8 @@ class NPMModuleTestCase(ModuleTestCase):
self.assertTrue(result['changed'])
self.module_main_command.assert_has_calls([
- call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None),
- call(['/testbin/npm', 'install', '--global', 'coffee-script'], check_rc=True, cwd=None),
+ call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
+ call(['/testbin/npm', 'install', '--global', 'coffee-script'], check_rc=True, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
])
def test_present_missing(self):
@@ -67,8 +67,8 @@ class NPMModuleTestCase(ModuleTestCase):
self.assertTrue(result['changed'])
self.module_main_command.assert_has_calls([
- call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None),
- call(['/testbin/npm', 'install', '--global', 'coffee-script'], check_rc=True, cwd=None),
+ call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
+ call(['/testbin/npm', 'install', '--global', 'coffee-script'], check_rc=True, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
])
def test_present_version(self):
@@ -87,8 +87,8 @@ class NPMModuleTestCase(ModuleTestCase):
self.assertTrue(result['changed'])
self.module_main_command.assert_has_calls([
- call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None),
- call(['/testbin/npm', 'install', '--global', 'coffee-script@2.5.1'], check_rc=True, cwd=None),
+ call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
+ call(['/testbin/npm', 'install', '--global', 'coffee-script@2.5.1'], check_rc=True, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
])
def test_present_version_update(self):
@@ -107,8 +107,8 @@ class NPMModuleTestCase(ModuleTestCase):
self.assertTrue(result['changed'])
self.module_main_command.assert_has_calls([
- call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None),
- call(['/testbin/npm', 'install', '--global', 'coffee-script@2.5.1'], check_rc=True, cwd=None),
+ call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
+ call(['/testbin/npm', 'install', '--global', 'coffee-script@2.5.1'], check_rc=True, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
])
def test_present_version_exists(self):
@@ -127,7 +127,7 @@ class NPMModuleTestCase(ModuleTestCase):
self.assertFalse(result['changed'])
self.module_main_command.assert_has_calls([
- call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None),
+ call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
])
def test_absent(self):
@@ -145,8 +145,8 @@ class NPMModuleTestCase(ModuleTestCase):
self.assertTrue(result['changed'])
self.module_main_command.assert_has_calls([
- call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None),
- call(['/testbin/npm', 'uninstall', '--global', 'coffee-script'], check_rc=True, cwd=None),
+ call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
+ call(['/testbin/npm', 'uninstall', '--global', 'coffee-script'], check_rc=True, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
])
def test_absent_version(self):
@@ -165,8 +165,8 @@ class NPMModuleTestCase(ModuleTestCase):
self.assertTrue(result['changed'])
self.module_main_command.assert_has_calls([
- call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None),
- call(['/testbin/npm', 'uninstall', '--global', 'coffee-script'], check_rc=True, cwd=None),
+ call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
+ call(['/testbin/npm', 'uninstall', '--global', 'coffee-script'], check_rc=True, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
])
def test_absent_version_different(self):
@@ -185,8 +185,8 @@ class NPMModuleTestCase(ModuleTestCase):
self.assertTrue(result['changed'])
self.module_main_command.assert_has_calls([
- call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None),
- call(['/testbin/npm', 'uninstall', '--global', 'coffee-script'], check_rc=True, cwd=None),
+ call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
+ call(['/testbin/npm', 'uninstall', '--global', 'coffee-script'], check_rc=True, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
])
def test_present_package_json(self):
@@ -203,7 +203,7 @@ class NPMModuleTestCase(ModuleTestCase):
self.assertTrue(result['changed'])
self.module_main_command.assert_has_calls([
- call(['/testbin/npm', 'install', '--global'], check_rc=True, cwd=None),
+ call(['/testbin/npm', 'install', '--global'], check_rc=True, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
])
def test_present_package_json_production(self):
@@ -221,7 +221,7 @@ class NPMModuleTestCase(ModuleTestCase):
self.assertTrue(result['changed'])
self.module_main_command.assert_has_calls([
- call(['/testbin/npm', 'install', '--global', '--production'], check_rc=True, cwd=None),
+ call(['/testbin/npm', 'install', '--global', '--production'], check_rc=True, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
])
def test_present_package_json_ci(self):
@@ -239,7 +239,7 @@ class NPMModuleTestCase(ModuleTestCase):
self.assertTrue(result['changed'])
self.module_main_command.assert_has_calls([
- call(['/testbin/npm', 'ci', '--global'], check_rc=True, cwd=None),
+ call(['/testbin/npm', 'ci', '--global'], check_rc=True, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
])
def test_present_package_json_ci_production(self):
@@ -258,5 +258,5 @@ class NPMModuleTestCase(ModuleTestCase):
self.assertTrue(result['changed'])
self.module_main_command.assert_has_calls([
- call(['/testbin/npm', 'ci', '--global', '--production'], check_rc=True, cwd=None),
+ call(['/testbin/npm', 'ci', '--global', '--production'], check_rc=True, cwd=None, environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'}),
])
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_opkg.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_opkg.py
index 8e52368ff..c42025959 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_opkg.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_opkg.py
@@ -6,236 +6,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import json
-from collections import namedtuple
from ansible_collections.community.general.plugins.modules import opkg
+from .helper import Helper
-import pytest
-TESTED_MODULE = opkg.__name__
-
-
-ModuleTestCase = namedtuple("ModuleTestCase", ["id", "input", "output", "run_command_calls"])
-RunCmdCall = namedtuple("RunCmdCall", ["command", "environ", "rc", "out", "err"])
-
-
-@pytest.fixture
-def patch_opkg(mocker):
- mocker.patch('ansible.module_utils.basic.AnsibleModule.get_bin_path', return_value='/testbin/opkg')
-
-
-TEST_CASES = [
- ModuleTestCase(
- id="install_zlibdev",
- input={"name": "zlib-dev", "state": "present"},
- output={
- "msg": "installed 1 package(s)"
- },
- run_command_calls=[
- RunCmdCall(
- command=["/testbin/opkg", "list-installed", "zlib-dev"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="",
- err="",
- ),
- RunCmdCall(
- command=["/testbin/opkg", "install", "zlib-dev"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out=(
- "Installing zlib-dev (1.2.11-6) to root..."
- "Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib-dev_1.2.11-6_mips_24kc.ipk"
- "Installing zlib (1.2.11-6) to root..."
- "Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib_1.2.11-6_mips_24kc.ipk"
- "Configuring zlib."
- "Configuring zlib-dev."
- ),
- err="",
- ),
- RunCmdCall(
- command=["/testbin/opkg", "list-installed", "zlib-dev"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="zlib-dev - 1.2.11-6\n",
- err="",
- ),
- ],
- ),
- ModuleTestCase(
- id="install_zlibdev_present",
- input={"name": "zlib-dev", "state": "present"},
- output={
- "msg": "package(s) already present"
- },
- run_command_calls=[
- RunCmdCall(
- command=["/testbin/opkg", "list-installed", "zlib-dev"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="zlib-dev - 1.2.11-6\n",
- err="",
- ),
- ],
- ),
- ModuleTestCase(
- id="install_zlibdev_force_reinstall",
- input={"name": "zlib-dev", "state": "present", "force": "reinstall"},
- output={
- "msg": "installed 1 package(s)"
- },
- run_command_calls=[
- RunCmdCall(
- command=["/testbin/opkg", "list-installed", "zlib-dev"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="zlib-dev - 1.2.11-6\n",
- err="",
- ),
- RunCmdCall(
- command=["/testbin/opkg", "install", "--force-reinstall", "zlib-dev"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out=(
- "Installing zlib-dev (1.2.11-6) to root...\n"
- "Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib-dev_1.2.11-6_mips_24kc.ipk\n"
- "Configuring zlib-dev.\n"
- ),
- err="",
- ),
- RunCmdCall(
- command=["/testbin/opkg", "list-installed", "zlib-dev"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="zlib-dev - 1.2.11-6\n",
- err="",
- ),
- ],
- ),
- ModuleTestCase(
- id="install_zlibdev_with_version",
- input={"name": "zlib-dev=1.2.11-6", "state": "present"},
- output={
- "msg": "installed 1 package(s)"
- },
- run_command_calls=[
- RunCmdCall(
- command=["/testbin/opkg", "list-installed", "zlib-dev"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="",
- err="",
- ),
- RunCmdCall(
- command=["/testbin/opkg", "install", "zlib-dev=1.2.11-6"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out=(
- "Installing zlib-dev (1.2.11-6) to root..."
- "Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib-dev_1.2.11-6_mips_24kc.ipk"
- "Installing zlib (1.2.11-6) to root..."
- "Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib_1.2.11-6_mips_24kc.ipk"
- "Configuring zlib."
- "Configuring zlib-dev."
- ),
- err="",
- ),
- RunCmdCall(
- command=["/testbin/opkg", "list-installed", "zlib-dev"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="zlib-dev - 1.2.11-6 \n", # This output has the extra space at the end, to satisfy the behaviour of Yocto/OpenEmbedded's opkg
- err="",
- ),
- ],
- ),
- ModuleTestCase(
- id="install_vim_updatecache",
- input={"name": "vim-fuller", "state": "present", "update_cache": True},
- output={
- "msg": "installed 1 package(s)"
- },
- run_command_calls=[
- RunCmdCall(
- command=["/testbin/opkg", "update"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="",
- err="",
- ),
- RunCmdCall(
- command=["/testbin/opkg", "list-installed", "vim-fuller"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="",
- err="",
- ),
- RunCmdCall(
- command=["/testbin/opkg", "install", "vim-fuller"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out=(
- "Multiple packages (libgcc1 and libgcc1) providing same name marked HOLD or PREFER. Using latest.\n"
- "Installing vim-fuller (9.0-1) to root...\n"
- "Downloading https://downloads.openwrt.org/snapshots/packages/x86_64/packages/vim-fuller_9.0-1_x86_64.ipk\n"
- "Installing terminfo (6.4-2) to root...\n"
- "Downloading https://downloads.openwrt.org/snapshots/packages/x86_64/base/terminfo_6.4-2_x86_64.ipk\n"
- "Installing libncurses6 (6.4-2) to root...\n"
- "Downloading https://downloads.openwrt.org/snapshots/packages/x86_64/base/libncurses6_6.4-2_x86_64.ipk\n"
- "Configuring terminfo.\n"
- "Configuring libncurses6.\n"
- "Configuring vim-fuller.\n"
- ),
- err="",
- ),
- RunCmdCall(
- command=["/testbin/opkg", "list-installed", "vim-fuller"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="vim-fuller - 9.0-1 \n", # This output has the extra space at the end, to satisfy the behaviour of Yocto/OpenEmbedded's opkg
- err="",
- ),
- ],
- ),
-]
-TEST_CASES_IDS = [item.id for item in TEST_CASES]
-
-
-@pytest.mark.parametrize('patch_ansible_module, testcase',
- [[x.input, x] for x in TEST_CASES],
- ids=TEST_CASES_IDS,
- indirect=['patch_ansible_module'])
-@pytest.mark.usefixtures('patch_ansible_module')
-def test_opkg(mocker, capfd, patch_opkg, testcase):
- """
- Run unit tests for test cases listen in TEST_CASES
- """
-
- run_cmd_calls = testcase.run_command_calls
-
- # Mock function used for running commands first
- call_results = [(x.rc, x.out, x.err) for x in run_cmd_calls]
- mock_run_command = mocker.patch('ansible.module_utils.basic.AnsibleModule.run_command', side_effect=call_results)
-
- # Try to run test case
- with pytest.raises(SystemExit):
- opkg.main()
-
- out, err = capfd.readouterr()
- results = json.loads(out)
- print("testcase =\n%s" % str(testcase))
- print("results =\n%s" % results)
-
- for test_result in testcase.output:
- assert results[test_result] == testcase.output[test_result], \
- "'{0}': '{1}' != '{2}'".format(test_result, results[test_result], testcase.output[test_result])
-
- call_args_list = [(item[0][0], item[1]) for item in mock_run_command.call_args_list]
- expected_call_args_list = [(item.command, item.environ) for item in run_cmd_calls]
- print("call args list =\n%s" % call_args_list)
- print("expected args list =\n%s" % expected_call_args_list)
-
- assert mock_run_command.call_count == len(run_cmd_calls)
- if mock_run_command.call_count:
- assert call_args_list == expected_call_args_list
+Helper.from_module(opkg, __name__)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_opkg.yaml b/ansible_collections/community/general/tests/unit/plugins/modules/test_opkg.yaml
new file mode 100644
index 000000000..6e227dea2
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_opkg.yaml
@@ -0,0 +1,142 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+---
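+# Test cases loaded via Helper.from_module() in test_opkg.py: each entry gives the module input, the expected
+# output, and the run_command calls the module is expected to make.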
+- id: install_zlibdev
+ input:
+ name: zlib-dev
+ state: present
+ output:
+ msg: installed 1 package(s)
+ run_command_calls:
+ - command: [/testbin/opkg, list-installed, zlib-dev]
+ environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: false}
+ rc: 0
+ out: ""
+ err: ""
+ - command: [/testbin/opkg, install, zlib-dev]
+ environ: *env-def
+ rc: 0
+ out: |
+ Installing zlib-dev (1.2.11-6) to root...
+ Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib-dev_1.2.11-6_mips_24kc.ipk
+ Installing zlib (1.2.11-6) to root...
+ Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib_1.2.11-6_mips_24kc.ipk
+ Configuring zlib.
+ Configuring zlib-dev.
+ err: ""
+ - command: [/testbin/opkg, list-installed, zlib-dev]
+ environ: *env-def
+ rc: 0
+ out: |
+ zlib-dev - 1.2.11-6
+ err: ""
+- id: install_zlibdev_present
+ input:
+ name: zlib-dev
+ state: present
+ output:
+ msg: package(s) already present
+ run_command_calls:
+ - command: [/testbin/opkg, list-installed, zlib-dev]
+ environ: *env-def
+ rc: 0
+ out: |
+ zlib-dev - 1.2.11-6
+ err: ""
+- id: install_zlibdev_force_reinstall
+ input:
+ name: zlib-dev
+ state: present
+ force: reinstall
+ output:
+ msg: installed 1 package(s)
+ run_command_calls:
+ - command: [/testbin/opkg, list-installed, zlib-dev]
+ environ: *env-def
+ rc: 0
+ out: |
+ zlib-dev - 1.2.11-6
+ err: ""
+ - command: [/testbin/opkg, install, --force-reinstall, zlib-dev]
+ environ: *env-def
+ rc: 0
+ out: |
+ Installing zlib-dev (1.2.11-6) to root...
+ Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib-dev_1.2.11-6_mips_24kc.ipk
+ Configuring zlib-dev.
+ err: ""
+ - command: [/testbin/opkg, list-installed, zlib-dev]
+ environ: *env-def
+ rc: 0
+ out: |
+ zlib-dev - 1.2.11-6
+ err: ""
+- id: install_zlibdev_with_version
+ input:
+ name: zlib-dev=1.2.11-6
+ state: present
+ output:
+ msg: installed 1 package(s)
+ run_command_calls:
+ - command: [/testbin/opkg, list-installed, zlib-dev]
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+ - command: [/testbin/opkg, install, zlib-dev=1.2.11-6]
+ environ: *env-def
+ rc: 0
+ out: |
+ Installing zlib-dev (1.2.11-6) to root...
+ Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib-dev_1.2.11-6_mips_24kc.ipk
+ Installing zlib (1.2.11-6) to root...
+ Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib_1.2.11-6_mips_24kc.ipk
+ Configuring zlib.
+ Configuring zlib-dev.
+ err: ""
+ - command: [/testbin/opkg, list-installed, zlib-dev]
+ environ: *env-def
+ rc: 0
+ out: "zlib-dev - 1.2.11-6 \n" # This output has the extra space at the end, to satisfy the behaviour of Yocto/OpenEmbedded's opkg
+ err: ""
+- id: install_vim_updatecache
+ input:
+ name: vim-fuller
+ state: present
+ update_cache: true
+ output:
+ msg: installed 1 package(s)
+ run_command_calls:
+ - command: [/testbin/opkg, update]
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+ - command: [/testbin/opkg, list-installed, vim-fuller]
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+ - command: [/testbin/opkg, install, vim-fuller]
+ environ: *env-def
+ rc: 0
+ out: |
+ Multiple packages (libgcc1 and libgcc1) providing same name marked HOLD or PREFER. Using latest.
+ Installing vim-fuller (9.0-1) to root...
+ Downloading https://downloads.openwrt.org/snapshots/packages/x86_64/packages/vim-fuller_9.0-1_x86_64.ipk
+ Installing terminfo (6.4-2) to root...
+ Downloading https://downloads.openwrt.org/snapshots/packages/x86_64/base/terminfo_6.4-2_x86_64.ipk
+ Installing libncurses6 (6.4-2) to root...
+ Downloading https://downloads.openwrt.org/snapshots/packages/x86_64/base/libncurses6_6.4-2_x86_64.ipk
+ Configuring terminfo.
+ Configuring libncurses6.
+ Configuring vim-fuller.
+ err: ""
+ - command: [/testbin/opkg, list-installed, vim-fuller]
+ environ: *env-def
+ rc: 0
+ out: "vim-fuller - 9.0-1 \n" # This output has the extra space at the end, to satisfy the behaviour of Yocto/OpenEmbedded's opkg
+ err: ""
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_pagerduty.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_pagerduty.py
index d363804bc..75987d3df 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_pagerduty.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_pagerduty.py
@@ -20,9 +20,9 @@ class PagerDutyTest(unittest.TestCase):
return object(), {'status': 200}
def _assert_ongoing_window_with_v1_compatible_header(self, module, url, headers, data=None, method=None):
- self.assertDictContainsSubset(
- {'Accept': 'application/vnd.pagerduty+json;version=2'},
- headers,
+ self.assertEqual(
+ 'application/vnd.pagerduty+json;version=2',
+ headers.get('Accept'),
'Accept:application/vnd.pagerduty+json;version=2 HTTP header not found'
)
return object(), {'status': 200}
@@ -36,17 +36,17 @@ class PagerDutyTest(unittest.TestCase):
return object(), {'status': 201}
def _assert_create_a_maintenance_window_from_header(self, module, url, headers, data=None, method=None):
- self.assertDictContainsSubset(
- {'From': 'requester_id'},
- headers,
+ self.assertEqual(
+ 'requester_id',
+ headers.get('From'),
'From:requester_id HTTP header not found'
)
return object(), {'status': 201}
def _assert_create_window_with_v1_compatible_header(self, module, url, headers, data=None, method=None):
- self.assertDictContainsSubset(
- {'Accept': 'application/vnd.pagerduty+json;version=2'},
- headers,
+ self.assertEqual(
+ 'application/vnd.pagerduty+json;version=2',
+ headers.get('Accept'),
'Accept:application/vnd.pagerduty+json;version=2 HTTP header not found'
)
return object(), {'status': 201}
@@ -54,9 +54,9 @@ class PagerDutyTest(unittest.TestCase):
def _assert_create_window_payload(self, module, url, headers, data=None, method=None):
payload = json.loads(data)
window_data = payload['maintenance_window']
- self.assertTrue('start_time' in window_data, '"start_time" is requiered attribute')
- self.assertTrue('end_time' in window_data, '"end_time" is requiered attribute')
- self.assertTrue('services' in window_data, '"services" is requiered attribute')
+ self.assertTrue('start_time' in window_data, '"start_time" is required attribute')
+ self.assertTrue('end_time' in window_data, '"end_time" is required attribute')
+ self.assertTrue('services' in window_data, '"services" is required attribute')
return object(), {'status': 201}
def _assert_create_window_single_service(self, module, url, headers, data=None, method=None):
@@ -89,9 +89,9 @@ class PagerDutyTest(unittest.TestCase):
return object(), {'status': 204}
def _assert_absent_window_with_v1_compatible_header(self, module, url, headers, method=None):
- self.assertDictContainsSubset(
- {'Accept': 'application/vnd.pagerduty+json;version=2'},
- headers,
+ self.assertEqual(
+ 'application/vnd.pagerduty+json;version=2',
+ headers.get('Accept'),
'Accept:application/vnd.pagerduty+json;version=2 HTTP header not found'
)
return object(), {'status': 204}
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_pagerduty_alert.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_pagerduty_alert.py
index 3df992b42..7a1e951a2 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_pagerduty_alert.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_pagerduty_alert.py
@@ -7,6 +7,10 @@ __metaclass__ = type
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.modules import pagerduty_alert
+import json
+import pytest
+from ansible_collections.community.general.tests.unit.compat.mock import patch
+from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
class PagerDutyAlertsTest(unittest.TestCase):
@@ -18,9 +22,9 @@ class PagerDutyAlertsTest(unittest.TestCase):
return Response(), {'status': 200}
def _assert_compatibility_header(self, module, url, method, headers):
- self.assertDictContainsSubset(
- {'Accept': 'application/vnd.pagerduty+json;version=2'},
- headers,
+ self.assertEqual(
+ 'application/vnd.pagerduty+json;version=2',
+ headers.get('Accept'),
'Accept:application/vnd.pagerduty+json;version=2 HTTP header not found'
)
return Response(), {'status': 200}
@@ -44,3 +48,106 @@ class PagerDutyAlertsTest(unittest.TestCase):
class Response(object):
def read(self):
return '{"incidents":[{"id": "incident_id", "status": "triggered"}]}'
+
+
+class TestPagerDutyAlertModule(ModuleTestCase):
+ def setUp(self):
+ super(TestPagerDutyAlertModule, self).setUp()
+ self.module = pagerduty_alert
+
+ def tearDown(self):
+ super(TestPagerDutyAlertModule, self).tearDown()
+
+ @pytest.fixture
+ def fetch_url_mock(self, mocker):
+ return mocker.patch('ansible.module_utils.monitoring.pagerduty_change.fetch_url')
+
+ def test_module_fail_when_required_args_missing(self):
+ with self.assertRaises(AnsibleFailJson):
+ set_module_args({})
+ self.module.main()
+
+ def test_ensure_alert_created_with_minimal_data(self):
+ set_module_args({
+ 'state': 'triggered',
+ 'api_version': 'v2',
+ 'integration_key': 'test',
+ 'source': 'My Ansible Script',
+ 'desc': 'Description for alert'
+ })
+
+ with patch.object(pagerduty_alert, 'fetch_url') as fetch_url_mock:
+ fetch_url_mock.return_value = (Response(), {"status": 202})
+ with self.assertRaises(AnsibleExitJson):
+ self.module.main()
+
+ assert fetch_url_mock.call_count == 1
+ url = fetch_url_mock.call_args[0][1]
+ json_data = fetch_url_mock.call_args[1]['data']
+ data = json.loads(json_data)
+
+ assert url == 'https://events.pagerduty.com/v2/enqueue'
+ assert data['routing_key'] == 'test'
+ assert data['event_action'] == 'trigger'
+ assert data['payload']['summary'] == 'Description for alert'
+ assert data['payload']['source'] == 'My Ansible Script'
+ assert data['payload']['severity'] == 'critical'
+ assert data['payload']['timestamp'] is not None
+
+ def test_ensure_alert_created_with_full_data(self):
+ set_module_args({
+ 'api_version': 'v2',
+ 'component': 'mysql',
+ 'custom_details': {'environment': 'production', 'notes': 'this is a test note'},
+ 'desc': 'Description for alert',
+ 'incident_class': 'ping failure',
+ 'integration_key': 'test',
+ 'link_url': 'https://pagerduty.com',
+ 'link_text': 'PagerDuty',
+ 'state': 'triggered',
+ 'source': 'My Ansible Script',
+ })
+
+ with patch.object(pagerduty_alert, 'fetch_url') as fetch_url_mock:
+ fetch_url_mock.return_value = (Response(), {"status": 202})
+ with self.assertRaises(AnsibleExitJson):
+ self.module.main()
+
+ assert fetch_url_mock.call_count == 1
+ url = fetch_url_mock.call_args[0][1]
+ json_data = fetch_url_mock.call_args[1]['data']
+ data = json.loads(json_data)
+
+ assert url == 'https://events.pagerduty.com/v2/enqueue'
+ assert data['routing_key'] == 'test'
+ assert data['payload']['summary'] == 'Description for alert'
+ assert data['payload']['source'] == 'My Ansible Script'
+ assert data['payload']['class'] == 'ping failure'
+ assert data['payload']['component'] == 'mysql'
+ assert data['payload']['custom_details']['environment'] == 'production'
+ assert data['payload']['custom_details']['notes'] == 'this is a test note'
+ assert data['links'][0]['href'] == 'https://pagerduty.com'
+ assert data['links'][0]['text'] == 'PagerDuty'
+
+ def test_ensure_alert_acknowledged(self):
+ set_module_args({
+ 'state': 'acknowledged',
+ 'api_version': 'v2',
+ 'integration_key': 'test',
+ 'incident_key': 'incident_test_id',
+ })
+
+ with patch.object(pagerduty_alert, 'fetch_url') as fetch_url_mock:
+ fetch_url_mock.return_value = (Response(), {"status": 202})
+ with self.assertRaises(AnsibleExitJson):
+ self.module.main()
+
+ assert fetch_url_mock.call_count == 1
+ url = fetch_url_mock.call_args[0][1]
+ json_data = fetch_url_mock.call_args[1]['data']
+ data = json.loads(json_data)
+
+ assert url == 'https://events.pagerduty.com/v2/enqueue'
+ assert data['routing_key'] == 'test'
+ assert data['event_action'] == 'acknowledge'
+ assert data['dedup_key'] == 'incident_test_id'
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_pkgin.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_pkgin.py
index d73911e0c..dea5a05b5 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_pkgin.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_pkgin.py
@@ -30,7 +30,7 @@ class TestPkginQueryPackage(unittest.TestCase):
command_result = pkgin.query_package(mock_module, package)
# then
- self.assertEquals(command_result, pkgin.PackageState.PRESENT)
+ self.assertEqual(command_result, pkgin.PackageState.PRESENT)
@mock.patch('ansible_collections.community.general.plugins.modules.pkgin.AnsibleModule')
def test_package_with_version_is_present(self, mock_module):
@@ -46,7 +46,7 @@ class TestPkginQueryPackage(unittest.TestCase):
command_result = pkgin.query_package(mock_module, package)
# then
- self.assertEquals(command_result, pkgin.PackageState.PRESENT)
+ self.assertEqual(command_result, pkgin.PackageState.PRESENT)
@mock.patch('ansible_collections.community.general.plugins.modules.pkgin.AnsibleModule')
def test_package_found_but_not_installed(self, mock_module):
@@ -62,7 +62,7 @@ class TestPkginQueryPackage(unittest.TestCase):
command_result = pkgin.query_package(mock_module, package)
# then
- self.assertEquals(command_result, pkgin.PackageState.NOT_INSTALLED)
+ self.assertEqual(command_result, pkgin.PackageState.NOT_INSTALLED)
@mock.patch('ansible_collections.community.general.plugins.modules.pkgin.AnsibleModule')
def test_package_found_outdated(self, mock_module):
@@ -78,7 +78,7 @@ class TestPkginQueryPackage(unittest.TestCase):
command_result = pkgin.query_package(mock_module, package)
# then
- self.assertEquals(command_result, pkgin.PackageState.OUTDATED)
+ self.assertEqual(command_result, pkgin.PackageState.OUTDATED)
@mock.patch('ansible_collections.community.general.plugins.modules.pkgin.AnsibleModule')
def test_package_with_version_found_outdated(self, mock_module):
@@ -94,7 +94,7 @@ class TestPkginQueryPackage(unittest.TestCase):
command_result = pkgin.query_package(mock_module, package)
# then
- self.assertEquals(command_result, pkgin.PackageState.OUTDATED)
+ self.assertEqual(command_result, pkgin.PackageState.OUTDATED)
@mock.patch('ansible_collections.community.general.plugins.modules.pkgin.AnsibleModule')
def test_package_not_found(self, mock_module):
@@ -110,7 +110,7 @@ class TestPkginQueryPackage(unittest.TestCase):
command_result = pkgin.query_package(mock_module, package)
# then
- self.assertEquals(command_result, pkgin.PackageState.NOT_FOUND)
+ self.assertEqual(command_result, pkgin.PackageState.NOT_FOUND)
@mock.patch('ansible_collections.community.general.plugins.modules.pkgin.AnsibleModule')
def test_with_parseable_flag_supported_package_is_present(self, mock_module):
@@ -126,7 +126,7 @@ class TestPkginQueryPackage(unittest.TestCase):
command_result = pkgin.query_package(mock_module, package)
# then
- self.assertEquals(command_result, pkgin.PackageState.PRESENT)
+ self.assertEqual(command_result, pkgin.PackageState.PRESENT)
@mock.patch('ansible_collections.community.general.plugins.modules.pkgin.AnsibleModule')
def test_with_parseable_flag_not_supported_package_is_present(self, mock_module):
@@ -142,4 +142,4 @@ class TestPkginQueryPackage(unittest.TestCase):
command_result = pkgin.query_package(mock_module, package)
# then
- self.assertEquals(command_result, pkgin.PackageState.PRESENT)
+ self.assertEqual(command_result, pkgin.PackageState.PRESENT)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_kvm.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_kvm.py
index 531185102..4e2cf032c 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_kvm.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_kvm.py
@@ -4,17 +4,165 @@
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-from ansible_collections.community.general.plugins.modules.proxmox_kvm import parse_dev, parse_mac
+import sys
+
+import pytest
+
+proxmoxer = pytest.importorskip("proxmoxer")
+mandatory_py_version = pytest.mark.skipif(
+ sys.version_info < (2, 7),
+ reason="The proxmoxer dependency requires python2.7 or higher",
+)
+
+from ansible_collections.community.general.plugins.modules import proxmox_kvm
+from ansible_collections.community.general.tests.unit.compat.mock import (
+ patch,
+ DEFAULT,
+)
+from ansible_collections.community.general.tests.unit.plugins.modules.utils import (
+ AnsibleExitJson,
+ AnsibleFailJson,
+ ModuleTestCase,
+ set_module_args,
+)
+import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
+
+
+class TestProxmoxKvmModule(ModuleTestCase):
+ def setUp(self):
+ super(TestProxmoxKvmModule, self).setUp()
+ proxmox_utils.HAS_PROXMOXER = True
+ self.module = proxmox_kvm
+ self.connect_mock = patch(
+ "ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect"
+ ).start()
+ self.get_node_mock = patch.object(
+ proxmox_utils.ProxmoxAnsible, "get_node"
+ ).start()
+ self.get_vm_mock = patch.object(proxmox_utils.ProxmoxAnsible, "get_vm").start()
+ self.create_vm_mock = patch.object(
+ proxmox_kvm.ProxmoxKvmAnsible, "create_vm"
+ ).start()
+
+ def tearDown(self):
+ self.create_vm_mock.stop()
+ self.get_vm_mock.stop()
+ self.get_node_mock.stop()
+ self.connect_mock.stop()
+ super(TestProxmoxKvmModule, self).tearDown()
+
+ def test_module_fail_when_required_args_missing(self):
+ with self.assertRaises(AnsibleFailJson):
+ set_module_args({})
+ self.module.main()
+
+    def test_module_exits_unchanged_when_provided_vmid_exists(self):
+ set_module_args(
+ {
+ "api_host": "host",
+ "api_user": "user",
+ "api_password": "password",
+ "vmid": "100",
+ "node": "pve",
+ }
+ )
+ self.get_vm_mock.return_value = [{"vmid": "100"}]
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ self.module.main()
+
+ assert self.get_vm_mock.call_count == 1
+ result = exc_info.value.args[0]
+ assert result["changed"] is False
+ assert result["msg"] == "VM with vmid <100> already exists"
+
+ def test_vm_created_when_vmid_not_exist_but_name_already_exist(self):
+ set_module_args(
+ {
+ "api_host": "host",
+ "api_user": "user",
+ "api_password": "password",
+ "vmid": "100",
+ "name": "existing.vm.local",
+ "node": "pve",
+ }
+ )
+ self.get_vm_mock.return_value = None
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ self.module.main()
+
+ assert self.get_vm_mock.call_count == 1
+ assert self.get_node_mock.call_count == 1
+ result = exc_info.value.args[0]
+ assert result["changed"] is True
+ assert result["msg"] == "VM existing.vm.local with vmid 100 deployed"
+
+ def test_vm_not_created_when_name_already_exist_and_vmid_not_set(self):
+ set_module_args(
+ {
+ "api_host": "host",
+ "api_user": "user",
+ "api_password": "password",
+ "name": "existing.vm.local",
+ "node": "pve",
+ }
+ )
+ with patch.object(proxmox_utils.ProxmoxAnsible, "get_vmid") as get_vmid_mock:
+ get_vmid_mock.return_value = {
+ "vmid": 100,
+ "name": "existing.vm.local",
+ }
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ self.module.main()
+
+ assert get_vmid_mock.call_count == 1
+ result = exc_info.value.args[0]
+ assert result["changed"] is False
+ def test_vm_created_when_name_doesnt_exist_and_vmid_not_set(self):
+ set_module_args(
+ {
+ "api_host": "host",
+ "api_user": "user",
+ "api_password": "password",
+ "name": "existing.vm.local",
+ "node": "pve",
+ }
+ )
+ self.get_vm_mock.return_value = None
+ with patch.multiple(
+ proxmox_utils.ProxmoxAnsible, get_vmid=DEFAULT, get_nextvmid=DEFAULT
+ ) as utils_mock:
+ utils_mock["get_vmid"].return_value = None
+ utils_mock["get_nextvmid"].return_value = 101
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ self.module.main()
-def test_parse_mac():
- assert parse_mac('virtio=00:11:22:AA:BB:CC,bridge=vmbr0,firewall=1') == '00:11:22:AA:BB:CC'
+ assert utils_mock["get_vmid"].call_count == 1
+ assert utils_mock["get_nextvmid"].call_count == 1
+ result = exc_info.value.args[0]
+ assert result["changed"] is True
+ assert result["msg"] == "VM existing.vm.local with vmid 101 deployed"
+ def test_parse_mac(self):
+ assert (
+ proxmox_kvm.parse_mac("virtio=00:11:22:AA:BB:CC,bridge=vmbr0,firewall=1")
+ == "00:11:22:AA:BB:CC"
+ )
-def test_parse_dev():
- assert parse_dev('local-lvm:vm-1000-disk-0,format=qcow2') == 'local-lvm:vm-1000-disk-0'
- assert parse_dev('local-lvm:vm-101-disk-1,size=8G') == 'local-lvm:vm-101-disk-1'
- assert parse_dev('local-zfs:vm-1001-disk-0') == 'local-zfs:vm-1001-disk-0'
+ def test_parse_dev(self):
+ assert (
+ proxmox_kvm.parse_dev("local-lvm:vm-1000-disk-0,format=qcow2")
+ == "local-lvm:vm-1000-disk-0"
+ )
+ assert (
+ proxmox_kvm.parse_dev("local-lvm:vm-101-disk-1,size=8G")
+ == "local-lvm:vm-101-disk-1"
+ )
+ assert (
+ proxmox_kvm.parse_dev("local-zfs:vm-1001-disk-0")
+ == "local-zfs:vm-1001-disk-0"
+ )
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_snap.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_snap.py
index 4bdcaa8b7..545fcd1f5 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_snap.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_snap.py
@@ -7,8 +7,16 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
+import sys
+
import pytest
+proxmoxer = pytest.importorskip('proxmoxer')
+mandatory_py_version = pytest.mark.skipif(
+ sys.version_info < (2, 7),
+ reason='The proxmoxer dependency requires python2.7 or higher'
+)
+
from ansible_collections.community.general.tests.unit.compat.mock import MagicMock, patch
from ansible_collections.community.general.plugins.modules import proxmox_snap
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_storage_contents_info.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_storage_contents_info.py
new file mode 100644
index 000000000..df2625dba
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_storage_contents_info.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2023, Julian Vanden Broeck <julian.vandenbroeck at dalibo.com>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import pytest
+
+proxmoxer = pytest.importorskip("proxmoxer")
+
+from ansible_collections.community.general.plugins.modules import proxmox_storage_contents_info
+from ansible_collections.community.general.tests.unit.compat.mock import patch
+from ansible_collections.community.general.tests.unit.plugins.modules.utils import (
+ AnsibleExitJson,
+ AnsibleFailJson,
+ ModuleTestCase,
+ set_module_args,
+)
+import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
+
+NODE1 = "pve"
+RAW_LIST_OUTPUT = [
+ {
+ "content": "backup",
+ "ctime": 1702528474,
+ "format": "pbs-vm",
+ "size": 273804166061,
+ "subtype": "qemu",
+ "vmid": 931,
+ "volid": "datastore:backup/vm/931/2023-12-14T04:34:34Z",
+ },
+ {
+ "content": "backup",
+ "ctime": 1702582560,
+ "format": "pbs-vm",
+ "size": 273804166059,
+ "subtype": "qemu",
+ "vmid": 931,
+ "volid": "datastore:backup/vm/931/2023-12-14T19:36:00Z",
+ },
+]
+
+
+def get_module_args(node, storage, content="all", vmid=None):
+ return {
+ "api_host": "host",
+ "api_user": "user",
+ "api_password": "password",
+ "node": node,
+ "storage": storage,
+ "content": content,
+ "vmid": vmid,
+ }
+
+
+class TestProxmoxStorageContentsInfo(ModuleTestCase):
+ def setUp(self):
+ super(TestProxmoxStorageContentsInfo, self).setUp()
+ proxmox_utils.HAS_PROXMOXER = True
+ self.module = proxmox_storage_contents_info
+ self.connect_mock = patch(
+ "ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect",
+ ).start()
+ self.connect_mock.return_value.nodes.return_value.storage.return_value.content.return_value.get.return_value = (
+ RAW_LIST_OUTPUT
+ )
+ self.connect_mock.return_value.nodes.get.return_value = [{"node": NODE1}]
+
+ def tearDown(self):
+ self.connect_mock.stop()
+ super(TestProxmoxStorageContentsInfo, self).tearDown()
+
+ def test_module_fail_when_required_args_missing(self):
+ with pytest.raises(AnsibleFailJson) as exc_info:
+ set_module_args({})
+ self.module.main()
+
+ def test_storage_contents_info(self):
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ set_module_args(get_module_args(node=NODE1, storage="datastore"))
+ expected_output = {}
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert not result["changed"]
+ assert result["proxmox_storage_content"] == RAW_LIST_OUTPUT
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_tasks_info.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_tasks_info.py
index 0d1b5a7bf..5c228655b 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_tasks_info.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_tasks_info.py
@@ -10,8 +10,16 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import pytest
import json
+import sys
+
+import pytest
+
+proxmoxer = pytest.importorskip('proxmoxer')
+mandatory_py_version = pytest.mark.skipif(
+ sys.version_info < (2, 7),
+ reason='The proxmoxer dependency requires python2.7 or higher'
+)
from ansible_collections.community.general.plugins.modules import proxmox_tasks_info
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_template.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_template.py
new file mode 100644
index 000000000..dc09a44b3
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_template.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2023, Sergei Antipov <greendayonfire at gmail.com>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import os
+import sys
+
+import pytest
+
+proxmoxer = pytest.importorskip('proxmoxer')
+mandatory_py_version = pytest.mark.skipif(
+ sys.version_info < (2, 7),
+ reason='The proxmoxer dependency requires python2.7 or higher'
+)
+
+from ansible_collections.community.general.plugins.modules import proxmox_template
+from ansible_collections.community.general.tests.unit.compat.mock import patch, Mock
+from ansible_collections.community.general.tests.unit.plugins.modules.utils import (
+ AnsibleFailJson,
+ ModuleTestCase,
+ set_module_args,
+)
+import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
+
+
+class TestProxmoxTemplateModule(ModuleTestCase):
+ def setUp(self):
+ super(TestProxmoxTemplateModule, self).setUp()
+ proxmox_utils.HAS_PROXMOXER = True
+ self.module = proxmox_template
+ self.connect_mock = patch(
+ "ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect"
+ )
+ self.connect_mock.start()
+
+ def tearDown(self):
+ self.connect_mock.stop()
+ super(TestProxmoxTemplateModule, self).tearDown()
+
+ @patch("os.stat")
+ @patch.multiple(os.path, exists=Mock(return_value=True), isfile=Mock(return_value=True))
+ def test_module_fail_when_toolbelt_not_installed_and_file_size_is_big(self, mock_stat):
+ self.module.HAS_REQUESTS_TOOLBELT = False
+ mock_stat.return_value.st_size = 268435460
+ set_module_args(
+ {
+ "api_host": "host",
+ "api_user": "user",
+ "api_password": "password",
+ "node": "pve",
+ "src": "/tmp/mock.iso",
+ "content_type": "iso"
+ }
+ )
+ with pytest.raises(AnsibleFailJson) as exc_info:
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["failed"] is True
+ assert result["msg"] == "'requests_toolbelt' module is required to upload files larger than 256MB"
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_vm_info.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_vm_info.py
new file mode 100644
index 000000000..94bbbc948
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_proxmox_vm_info.py
@@ -0,0 +1,714 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2023, Sergei Antipov <greendayonfire at gmail.com>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import sys
+
+import pytest
+
+proxmoxer = pytest.importorskip("proxmoxer")
+mandatory_py_version = pytest.mark.skipif(
+ sys.version_info < (2, 7),
+ reason="The proxmoxer dependency requires python2.7 or higher",
+)
+
+from ansible_collections.community.general.plugins.modules import proxmox_vm_info
+from ansible_collections.community.general.tests.unit.compat.mock import patch
+from ansible_collections.community.general.tests.unit.plugins.modules.utils import (
+ AnsibleExitJson,
+ AnsibleFailJson,
+ ModuleTestCase,
+ set_module_args,
+)
+import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
+
+NODE1 = "pve"
+NODE2 = "pve2"
+RAW_CLUSTER_OUTPUT = [
+ {
+ "cpu": 0.174069059487628,
+ "disk": 0,
+ "diskread": 6656,
+ "diskwrite": 0,
+ "id": "qemu/100",
+ "maxcpu": 1,
+ "maxdisk": 34359738368,
+ "maxmem": 4294967296,
+ "mem": 35304543,
+ "name": "pxe.home.arpa",
+ "netin": 416956,
+ "netout": 17330,
+ "node": NODE1,
+ "status": "running",
+ "template": 0,
+ "type": "qemu",
+ "uptime": 669,
+ "vmid": 100,
+ },
+ {
+ "cpu": 0,
+ "disk": 0,
+ "diskread": 0,
+ "diskwrite": 0,
+ "id": "qemu/101",
+ "maxcpu": 1,
+ "maxdisk": 0,
+ "maxmem": 536870912,
+ "mem": 0,
+ "name": "test1",
+ "netin": 0,
+ "netout": 0,
+ "node": NODE2,
+ "pool": "pool1",
+ "status": "stopped",
+ "template": 0,
+ "type": "qemu",
+ "uptime": 0,
+ "vmid": 101,
+ },
+ {
+ "cpu": 0,
+ "disk": 352190464,
+ "diskread": 0,
+ "diskwrite": 0,
+ "id": "lxc/102",
+ "maxcpu": 2,
+ "maxdisk": 10737418240,
+ "maxmem": 536870912,
+ "mem": 28192768,
+ "name": "test-lxc.home.arpa",
+ "netin": 102757,
+ "netout": 446,
+ "node": NODE1,
+ "status": "running",
+ "template": 0,
+ "type": "lxc",
+ "uptime": 161,
+ "vmid": 102,
+ },
+ {
+ "cpu": 0,
+ "disk": 0,
+ "diskread": 0,
+ "diskwrite": 0,
+ "id": "lxc/103",
+ "maxcpu": 2,
+ "maxdisk": 10737418240,
+ "maxmem": 536870912,
+ "mem": 0,
+ "name": "test1-lxc.home.arpa",
+ "netin": 0,
+ "netout": 0,
+ "node": NODE2,
+ "pool": "pool1",
+ "status": "stopped",
+ "template": 0,
+ "type": "lxc",
+ "uptime": 0,
+ "vmid": 103,
+ },
+ {
+ "cpu": 0,
+ "disk": 0,
+ "diskread": 0,
+ "diskwrite": 0,
+ "id": "lxc/104",
+ "maxcpu": 2,
+ "maxdisk": 10737418240,
+ "maxmem": 536870912,
+ "mem": 0,
+ "name": "test-lxc.home.arpa",
+ "netin": 0,
+ "netout": 0,
+ "node": NODE2,
+ "pool": "pool1",
+ "status": "stopped",
+ "template": 0,
+ "type": "lxc",
+ "uptime": 0,
+ "vmid": 104,
+ },
+ {
+ "cpu": 0,
+ "disk": 0,
+ "diskread": 0,
+ "diskwrite": 0,
+ "id": "lxc/105",
+ "maxcpu": 2,
+ "maxdisk": 10737418240,
+ "maxmem": 536870912,
+ "mem": 0,
+ "name": "",
+ "netin": 0,
+ "netout": 0,
+ "node": NODE2,
+ "pool": "pool1",
+ "status": "stopped",
+ "template": 0,
+ "type": "lxc",
+ "uptime": 0,
+ "vmid": 105,
+ },
+]
+RAW_LXC_OUTPUT = [
+ {
+ "cpu": 0,
+ "cpus": 2,
+ "disk": 0,
+ "diskread": 0,
+ "diskwrite": 0,
+ "maxdisk": 10737418240,
+ "maxmem": 536870912,
+ "maxswap": 536870912,
+ "mem": 0,
+ "name": "test1-lxc.home.arpa",
+ "netin": 0,
+ "netout": 0,
+ "status": "stopped",
+ "swap": 0,
+ "type": "lxc",
+ "uptime": 0,
+ "vmid": "103",
+ },
+ {
+ "cpu": 0,
+ "cpus": 2,
+ "disk": 352190464,
+ "diskread": 0,
+ "diskwrite": 0,
+ "maxdisk": 10737418240,
+ "maxmem": 536870912,
+ "maxswap": 536870912,
+ "mem": 28192768,
+ "name": "test-lxc.home.arpa",
+ "netin": 102757,
+ "netout": 446,
+ "pid": 4076752,
+ "status": "running",
+ "swap": 0,
+ "type": "lxc",
+ "uptime": 161,
+ "vmid": "102",
+ },
+ {
+ "cpu": 0,
+ "cpus": 2,
+ "disk": 0,
+ "diskread": 0,
+ "diskwrite": 0,
+ "maxdisk": 10737418240,
+ "maxmem": 536870912,
+ "maxswap": 536870912,
+ "mem": 0,
+ "name": "test-lxc.home.arpa",
+ "netin": 0,
+ "netout": 0,
+ "status": "stopped",
+ "swap": 0,
+ "type": "lxc",
+ "uptime": 0,
+ "vmid": "104",
+ },
+ {
+ "cpu": 0,
+ "cpus": 2,
+ "disk": 0,
+ "diskread": 0,
+ "diskwrite": 0,
+ "maxdisk": 10737418240,
+ "maxmem": 536870912,
+ "maxswap": 536870912,
+ "mem": 0,
+ "name": "",
+ "netin": 0,
+ "netout": 0,
+ "status": "stopped",
+ "swap": 0,
+ "type": "lxc",
+ "uptime": 0,
+ "vmid": "105",
+ },
+]
+RAW_QEMU_OUTPUT = [
+ {
+ "cpu": 0,
+ "cpus": 1,
+ "disk": 0,
+ "diskread": 0,
+ "diskwrite": 0,
+ "maxdisk": 0,
+ "maxmem": 536870912,
+ "mem": 0,
+ "name": "test1",
+ "netin": 0,
+ "netout": 0,
+ "status": "stopped",
+ "uptime": 0,
+ "vmid": 101,
+ },
+ {
+ "cpu": 0.174069059487628,
+ "cpus": 1,
+ "disk": 0,
+ "diskread": 6656,
+ "diskwrite": 0,
+ "maxdisk": 34359738368,
+ "maxmem": 4294967296,
+ "mem": 35304543,
+ "name": "pxe.home.arpa",
+ "netin": 416956,
+ "netout": 17330,
+ "pid": 4076688,
+ "status": "running",
+ "uptime": 669,
+ "vmid": 100,
+ },
+]
+EXPECTED_VMS_OUTPUT = [
+ {
+ "cpu": 0.174069059487628,
+ "cpus": 1,
+ "disk": 0,
+ "diskread": 6656,
+ "diskwrite": 0,
+ "id": "qemu/100",
+ "maxcpu": 1,
+ "maxdisk": 34359738368,
+ "maxmem": 4294967296,
+ "mem": 35304543,
+ "name": "pxe.home.arpa",
+ "netin": 416956,
+ "netout": 17330,
+ "node": NODE1,
+ "pid": 4076688,
+ "status": "running",
+ "template": False,
+ "type": "qemu",
+ "uptime": 669,
+ "vmid": 100,
+ },
+ {
+ "cpu": 0,
+ "cpus": 1,
+ "disk": 0,
+ "diskread": 0,
+ "diskwrite": 0,
+ "id": "qemu/101",
+ "maxcpu": 1,
+ "maxdisk": 0,
+ "maxmem": 536870912,
+ "mem": 0,
+ "name": "test1",
+ "netin": 0,
+ "netout": 0,
+ "node": NODE2,
+ "pool": "pool1",
+ "status": "stopped",
+ "template": False,
+ "type": "qemu",
+ "uptime": 0,
+ "vmid": 101,
+ },
+ {
+ "cpu": 0,
+ "cpus": 2,
+ "disk": 352190464,
+ "diskread": 0,
+ "diskwrite": 0,
+ "id": "lxc/102",
+ "maxcpu": 2,
+ "maxdisk": 10737418240,
+ "maxmem": 536870912,
+ "maxswap": 536870912,
+ "mem": 28192768,
+ "name": "test-lxc.home.arpa",
+ "netin": 102757,
+ "netout": 446,
+ "node": NODE1,
+ "pid": 4076752,
+ "status": "running",
+ "swap": 0,
+ "template": False,
+ "type": "lxc",
+ "uptime": 161,
+ "vmid": 102,
+ },
+ {
+ "cpu": 0,
+ "cpus": 2,
+ "disk": 0,
+ "diskread": 0,
+ "diskwrite": 0,
+ "id": "lxc/103",
+ "maxcpu": 2,
+ "maxdisk": 10737418240,
+ "maxmem": 536870912,
+ "maxswap": 536870912,
+ "mem": 0,
+ "name": "test1-lxc.home.arpa",
+ "netin": 0,
+ "netout": 0,
+ "node": NODE2,
+ "pool": "pool1",
+ "status": "stopped",
+ "swap": 0,
+ "template": False,
+ "type": "lxc",
+ "uptime": 0,
+ "vmid": 103,
+ },
+ {
+ "cpu": 0,
+ "cpus": 2,
+ "disk": 0,
+ "diskread": 0,
+ "diskwrite": 0,
+ "id": "lxc/104",
+ "maxcpu": 2,
+ "maxdisk": 10737418240,
+ "maxmem": 536870912,
+ "maxswap": 536870912,
+ "mem": 0,
+ "name": "test-lxc.home.arpa",
+ "netin": 0,
+ "netout": 0,
+ "node": NODE2,
+ "pool": "pool1",
+ "status": "stopped",
+ "swap": 0,
+ "template": False,
+ "type": "lxc",
+ "uptime": 0,
+ "vmid": 104,
+ },
+ {
+ "cpu": 0,
+ "cpus": 2,
+ "disk": 0,
+ "diskread": 0,
+ "diskwrite": 0,
+ "id": "lxc/105",
+ "maxcpu": 2,
+ "maxdisk": 10737418240,
+ "maxmem": 536870912,
+ "maxswap": 536870912,
+ "mem": 0,
+ "name": "",
+ "netin": 0,
+ "netout": 0,
+ "node": NODE2,
+ "pool": "pool1",
+ "status": "stopped",
+ "swap": 0,
+ "template": False,
+ "type": "lxc",
+ "uptime": 0,
+ "vmid": 105,
+ },
+]
+
+
+def get_module_args(type="all", node=None, vmid=None, name=None, config="none"):
+ return {
+ "api_host": "host",
+ "api_user": "user",
+ "api_password": "password",
+ "node": node,
+ "type": type,
+ "vmid": vmid,
+ "name": name,
+ "config": config,
+ }
+
+
+class TestProxmoxVmInfoModule(ModuleTestCase):
+ def setUp(self):
+ super(TestProxmoxVmInfoModule, self).setUp()
+ proxmox_utils.HAS_PROXMOXER = True
+ self.module = proxmox_vm_info
+ self.connect_mock = patch(
+ "ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect",
+ ).start()
+ self.connect_mock.return_value.nodes.return_value.lxc.return_value.get.return_value = (
+ RAW_LXC_OUTPUT
+ )
+ self.connect_mock.return_value.nodes.return_value.qemu.return_value.get.return_value = (
+ RAW_QEMU_OUTPUT
+ )
+ self.connect_mock.return_value.cluster.return_value.resources.return_value.get.return_value = (
+ RAW_CLUSTER_OUTPUT
+ )
+ self.connect_mock.return_value.nodes.get.return_value = [{"node": NODE1}]
+
+ def tearDown(self):
+ self.connect_mock.stop()
+ super(TestProxmoxVmInfoModule, self).tearDown()
+
+ def test_module_fail_when_required_args_missing(self):
+ with pytest.raises(AnsibleFailJson) as exc_info:
+ set_module_args({})
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["msg"] == "missing required arguments: api_host, api_user"
+
+ def test_get_lxc_vms_information(self):
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ set_module_args(get_module_args(type="lxc"))
+ expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["type"] == "lxc"]
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["changed"] is False
+ assert result["proxmox_vms"] == expected_output
+
+ def test_get_qemu_vms_information(self):
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ set_module_args(get_module_args(type="qemu"))
+ expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["type"] == "qemu"]
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["proxmox_vms"] == expected_output
+
+ def test_get_all_vms_information(self):
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ set_module_args(get_module_args())
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["proxmox_vms"] == EXPECTED_VMS_OUTPUT
+
+ def test_vmid_is_converted_to_int(self):
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ set_module_args(get_module_args(type="lxc"))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert isinstance(result["proxmox_vms"][0]["vmid"], int)
+
+ def test_get_specific_lxc_vm_information(self):
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ vmid = 102
+ expected_output = [
+ vm
+ for vm in EXPECTED_VMS_OUTPUT
+ if vm["vmid"] == vmid and vm["type"] == "lxc"
+ ]
+ set_module_args(get_module_args(type="lxc", vmid=vmid))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["proxmox_vms"] == expected_output
+ assert len(result["proxmox_vms"]) == 1
+
+ def test_get_specific_qemu_vm_information(self):
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ vmid = 100
+ expected_output = [
+ vm
+ for vm in EXPECTED_VMS_OUTPUT
+ if vm["vmid"] == vmid and vm["type"] == "qemu"
+ ]
+ set_module_args(get_module_args(type="qemu", vmid=vmid))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["proxmox_vms"] == expected_output
+ assert len(result["proxmox_vms"]) == 1
+
+ def test_get_specific_vm_information(self):
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ vmid = 100
+ expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["vmid"] == vmid]
+ set_module_args(get_module_args(type="all", vmid=vmid))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["proxmox_vms"] == expected_output
+ assert len(result["proxmox_vms"]) == 1
+
+ def test_get_specific_vm_information_by_using_name(self):
+ name = "test1-lxc.home.arpa"
+ self.connect_mock.return_value.cluster.resources.get.return_value = [
+ {"name": name, "vmid": "103"}
+ ]
+
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["name"] == name]
+ set_module_args(get_module_args(type="all", name=name))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["proxmox_vms"] == expected_output
+ assert len(result["proxmox_vms"]) == 1
+
+ def test_get_multiple_vms_with_the_same_name(self):
+ name = "test-lxc.home.arpa"
+ self.connect_mock.return_value.cluster.resources.get.return_value = [
+ {"name": name, "vmid": "102"},
+ {"name": name, "vmid": "104"},
+ ]
+
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["name"] == name]
+ set_module_args(get_module_args(type="all", name=name))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["proxmox_vms"] == expected_output
+ assert len(result["proxmox_vms"]) == 2
+
+ def test_get_vm_with_an_empty_name(self):
+ name = ""
+ self.connect_mock.return_value.cluster.resources.get.return_value = [
+ {"name": name, "vmid": "105"},
+ ]
+
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["name"] == name]
+ set_module_args(get_module_args(type="all", name=name))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["proxmox_vms"] == expected_output
+ assert len(result["proxmox_vms"]) == 1
+
+ def test_get_all_lxc_vms_from_specific_node(self):
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ expected_output = [
+ vm
+ for vm in EXPECTED_VMS_OUTPUT
+ if vm["node"] == NODE1 and vm["type"] == "lxc"
+ ]
+ set_module_args(get_module_args(type="lxc", node=NODE1))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["proxmox_vms"] == expected_output
+ assert len(result["proxmox_vms"]) == 1
+
+ def test_get_all_qemu_vms_from_specific_node(self):
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ expected_output = [
+ vm
+ for vm in EXPECTED_VMS_OUTPUT
+ if vm["node"] == NODE1 and vm["type"] == "qemu"
+ ]
+ set_module_args(get_module_args(type="qemu", node=NODE1))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["proxmox_vms"] == expected_output
+ assert len(result["proxmox_vms"]) == 1
+
+ def test_get_all_vms_from_specific_node(self):
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["node"] == NODE1]
+ set_module_args(get_module_args(node=NODE1))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["proxmox_vms"] == expected_output
+ assert len(result["proxmox_vms"]) == 2
+
+ def test_module_returns_empty_list_when_vm_does_not_exist(self):
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ vmid = 200
+ set_module_args(get_module_args(type="all", vmid=vmid))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["proxmox_vms"] == []
+
+ def test_module_fail_when_qemu_request_fails(self):
+ self.connect_mock.return_value.nodes.return_value.qemu.return_value.get.side_effect = IOError(
+ "Some mocked connection error."
+ )
+ with pytest.raises(AnsibleFailJson) as exc_info:
+ set_module_args(get_module_args(type="qemu"))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert "Failed to retrieve QEMU VMs information:" in result["msg"]
+
+ def test_module_fail_when_lxc_request_fails(self):
+ self.connect_mock.return_value.nodes.return_value.lxc.return_value.get.side_effect = IOError(
+ "Some mocked connection error."
+ )
+ with pytest.raises(AnsibleFailJson) as exc_info:
+ set_module_args(get_module_args(type="lxc"))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert "Failed to retrieve LXC VMs information:" in result["msg"]
+
+ def test_module_fail_when_cluster_resources_request_fails(self):
+ self.connect_mock.return_value.cluster.return_value.resources.return_value.get.side_effect = IOError(
+ "Some mocked connection error."
+ )
+ with pytest.raises(AnsibleFailJson) as exc_info:
+ set_module_args(get_module_args())
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert (
+ "Failed to retrieve VMs information from cluster resources:"
+ in result["msg"]
+ )
+
+ def test_module_fail_when_node_does_not_exist(self):
+ with pytest.raises(AnsibleFailJson) as exc_info:
+ set_module_args(get_module_args(type="all", node="NODE3"))
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["msg"] == "Node NODE3 doesn't exist in PVE cluster"
+
+ def test_call_to_get_vmid_is_not_used_when_vmid_provided(self):
+ with patch(
+ "ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible.get_vmid"
+ ) as get_vmid_mock:
+ with pytest.raises(AnsibleExitJson):
+ vmid = 100
+ set_module_args(
+ get_module_args(type="all", vmid=vmid, name="something")
+ )
+ self.module.main()
+
+ assert get_vmid_mock.call_count == 0
+
+ def test_config_returned_when_specified_qemu_vm_with_config(self):
+ config_vm_value = {
+ 'scsi0': 'local-lvm:vm-101-disk-0,iothread=1,size=32G',
+ 'net0': 'virtio=4E:79:9F:A8:EE:E4,bridge=vmbr0,firewall=1',
+ 'scsihw': 'virtio-scsi-single',
+ 'cores': 1,
+ 'name': 'test1',
+ 'ostype': 'l26',
+ 'boot': 'order=scsi0;ide2;net0',
+ 'memory': 2048,
+ 'sockets': 1,
+ }
+ (self.connect_mock.return_value.nodes.return_value.qemu.return_value.
+ config.return_value.get.return_value) = config_vm_value
+
+ with pytest.raises(AnsibleExitJson) as exc_info:
+ vmid = 101
+ set_module_args(get_module_args(
+ type="qemu",
+ vmid=vmid,
+ config="current",
+ ))
+ expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["vmid"] == vmid]
+ expected_output[0]["config"] = config_vm_value
+ self.module.main()
+
+ result = exc_info.value.args[0]
+ assert result["proxmox_vms"] == expected_output
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_puppet.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_puppet.py
index f62523e7f..57f88ada1 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_puppet.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_puppet.py
@@ -12,216 +12,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import json
-from collections import namedtuple
from ansible_collections.community.general.plugins.modules import puppet
+from .helper import Helper
-import pytest
-TESTED_MODULE = puppet.__name__
-
-
-ModuleTestCase = namedtuple("ModuleTestCase", ["id", "input", "output", "run_command_calls"])
-RunCmdCall = namedtuple("RunCmdCall", ["command", "environ", "rc", "out", "err"])
-
-
-@pytest.fixture
-def patch_get_bin_path(mocker):
- """
- Function used for mocking AnsibleModule.get_bin_path
- """
- def mockie(self, path, *args, **kwargs):
- return "/testbin/{0}".format(path)
- mocker.patch("ansible.module_utils.basic.AnsibleModule.get_bin_path", mockie)
-
-
-TEST_CASES = [
- ModuleTestCase(
- id="puppet_agent_plain",
- input={},
- output=dict(changed=False),
- run_command_calls=[
- RunCmdCall(
- command=["/testbin/puppet", "config", "print", "agent_disabled_lockfile"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="blah, anything",
- err="",
- ),
- RunCmdCall(
- command=[
- "/testbin/timeout", "-s", "9", "30m", "/testbin/puppet", "agent", "--onetime", "--no-daemonize",
- "--no-usecacheonfailure", "--no-splay", "--detailed-exitcodes", "--verbose", "--color", "0"
- ],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="",
- err="",
- ),
- ]
- ),
- ModuleTestCase(
- id="puppet_agent_certname",
- input={"certname": "potatobox"},
- output=dict(changed=False),
- run_command_calls=[
- RunCmdCall(
- command=["/testbin/puppet", "config", "print", "agent_disabled_lockfile"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="blah, anything",
- err="",
- ),
- RunCmdCall(
- command=[
- "/testbin/timeout", "-s", "9", "30m", "/testbin/puppet", "agent", "--onetime", "--no-daemonize",
- "--no-usecacheonfailure", "--no-splay", "--detailed-exitcodes", "--verbose", "--color", "0", "--certname=potatobox"
- ],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="",
- err="",
- ),
- ]
- ),
- ModuleTestCase(
- id="puppet_agent_tags_abc",
- input={"tags": ["a", "b", "c"]},
- output=dict(changed=False),
- run_command_calls=[
- RunCmdCall(
- command=["/testbin/puppet", "config", "print", "agent_disabled_lockfile"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="blah, anything",
- err="",
- ),
- RunCmdCall(
- command=[
- "/testbin/timeout", "-s", "9", "30m", "/testbin/puppet", "agent", "--onetime", "--no-daemonize",
- "--no-usecacheonfailure", "--no-splay", "--detailed-exitcodes", "--verbose", "--color", "0", "--tags", "a,b,c"
- ],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="",
- err="",
- ),
- ]
- ),
- ModuleTestCase(
- id="puppet_agent_skip_tags_def",
- input={"skip_tags": ["d", "e", "f"]},
- output=dict(changed=False),
- run_command_calls=[
- RunCmdCall(
- command=["/testbin/puppet", "config", "print", "agent_disabled_lockfile"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="blah, anything",
- err="",
- ),
- RunCmdCall(
- command=[
- "/testbin/timeout", "-s", "9", "30m", "/testbin/puppet", "agent", "--onetime", "--no-daemonize",
- "--no-usecacheonfailure", "--no-splay", "--detailed-exitcodes", "--verbose", "--color", "0", "--skip_tags", "d,e,f"
- ],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="",
- err="",
- ),
- ]
- ),
- ModuleTestCase(
- id="puppet_agent_noop_false",
- input={"noop": False},
- output=dict(changed=False),
- run_command_calls=[
- RunCmdCall(
- command=["/testbin/puppet", "config", "print", "agent_disabled_lockfile"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="blah, anything",
- err="",
- ),
- RunCmdCall(
- command=[
- "/testbin/timeout", "-s", "9", "30m", "/testbin/puppet", "agent", "--onetime", "--no-daemonize",
- "--no-usecacheonfailure", "--no-splay", "--detailed-exitcodes", "--verbose", "--color", "0", "--no-noop"
- ],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="",
- err="",
- ),
- ]
- ),
- ModuleTestCase(
- id="puppet_agent_noop_true",
- input={"noop": True},
- output=dict(changed=False),
- run_command_calls=[
- RunCmdCall(
- command=["/testbin/puppet", "config", "print", "agent_disabled_lockfile"],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="blah, anything",
- err="",
- ),
- RunCmdCall(
- command=[
- "/testbin/timeout", "-s", "9", "30m", "/testbin/puppet", "agent", "--onetime", "--no-daemonize",
- "--no-usecacheonfailure", "--no-splay", "--detailed-exitcodes", "--verbose", "--color", "0", "--noop"
- ],
- environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- rc=0,
- out="",
- err="",
- ),
- ]
- ),
-]
-TEST_CASES_IDS = [item.id for item in TEST_CASES]
-
-
-@pytest.mark.parametrize("patch_ansible_module, testcase",
- [[x.input, x] for x in TEST_CASES],
- ids=TEST_CASES_IDS,
- indirect=["patch_ansible_module"])
-@pytest.mark.usefixtures("patch_ansible_module")
-def test_puppet(mocker, capfd, patch_get_bin_path, testcase):
- """
- Run unit tests for test cases listen in TEST_CASES
- """
-
- run_cmd_calls = testcase.run_command_calls
-
- # Mock function used for running commands first
- call_results = [(x.rc, x.out, x.err) for x in run_cmd_calls]
- mock_run_command = mocker.patch(
- "ansible.module_utils.basic.AnsibleModule.run_command",
- side_effect=call_results)
-
- # Try to run test case
- with pytest.raises(SystemExit):
- puppet.main()
-
- out, err = capfd.readouterr()
- results = json.loads(out)
- print("testcase =\n%s" % str(testcase))
- print("results =\n%s" % results)
-
- assert mock_run_command.call_count == len(run_cmd_calls)
- if mock_run_command.call_count:
- call_args_list = [(item[0][0], item[1]) for item in mock_run_command.call_args_list]
- expected_call_args_list = [(item.command, item.environ) for item in run_cmd_calls]
- print("call args list =\n%s" % call_args_list)
- print("expected args list =\n%s" % expected_call_args_list)
- assert call_args_list == expected_call_args_list
-
- assert results.get("changed", False) == testcase.output["changed"]
- if "failed" in testcase:
- assert results.get("failed", False) == testcase.output["failed"]
- if "msg" in testcase:
- assert results.get("msg", "") == testcase.output["msg"]
+Helper.from_module(puppet, __name__)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_puppet.yaml b/ansible_collections/community/general/tests/unit/plugins/modules/test_puppet.yaml
new file mode 100644
index 000000000..308be9797
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_puppet.yaml
@@ -0,0 +1,192 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+---
+- id: puppet_agent_plain
+ input: {}
+ output:
+ changed: false
+ run_command_calls:
+ - command: [/testbin/puppet, config, print, agent_disabled_lockfile]
+ environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: false}
+ rc: 0
+ out: "blah, anything"
+ err: ""
+ - command:
+ - /testbin/timeout
+ - -s
+ - "9"
+ - 30m
+ - /testbin/puppet
+ - agent
+ - --onetime
+ - --no-daemonize
+ - --no-usecacheonfailure
+ - --no-splay
+ - --detailed-exitcodes
+ - --verbose
+ - --color
+ - "0"
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+- id: puppet_agent_certname
+ input:
+ certname: potatobox
+ output:
+ changed: false
+ run_command_calls:
+ - command: [/testbin/puppet, config, print, agent_disabled_lockfile]
+ environ: *env-def
+ rc: 0
+ out: "blah, anything"
+ err: ""
+ - command:
+ - /testbin/timeout
+ - -s
+ - "9"
+ - 30m
+ - /testbin/puppet
+ - agent
+ - --onetime
+ - --no-daemonize
+ - --no-usecacheonfailure
+ - --no-splay
+ - --detailed-exitcodes
+ - --verbose
+ - --color
+ - "0"
+ - --certname=potatobox
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+- id: puppet_agent_tags_abc
+ input:
+ tags: [a, b, c]
+ output:
+ changed: false
+ run_command_calls:
+ - command: [/testbin/puppet, config, print, agent_disabled_lockfile]
+ environ: *env-def
+ rc: 0
+ out: "blah, anything"
+ err: ""
+ - command:
+ - /testbin/timeout
+ - -s
+ - "9"
+ - 30m
+ - /testbin/puppet
+ - agent
+ - --onetime
+ - --no-daemonize
+ - --no-usecacheonfailure
+ - --no-splay
+ - --detailed-exitcodes
+ - --verbose
+ - --color
+ - "0"
+ - --tags
+ - a,b,c
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+- id: puppet_agent_skip_tags_def
+ input:
+ skip_tags: [d, e, f]
+ output:
+ changed: false
+ run_command_calls:
+ - command: [/testbin/puppet, config, print, agent_disabled_lockfile]
+ environ: *env-def
+ rc: 0
+ out: "blah, anything"
+ err: ""
+ - command:
+ - /testbin/timeout
+ - -s
+ - "9"
+ - 30m
+ - /testbin/puppet
+ - agent
+ - --onetime
+ - --no-daemonize
+ - --no-usecacheonfailure
+ - --no-splay
+ - --detailed-exitcodes
+ - --verbose
+ - --color
+ - "0"
+ - --skip_tags
+ - d,e,f
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+- id: puppet_agent_noop_false
+ input:
+ noop: false
+ output:
+ changed: false
+ run_command_calls:
+ - command: [/testbin/puppet, config, print, agent_disabled_lockfile]
+ environ: *env-def
+ rc: 0
+ out: "blah, anything"
+ err: ""
+ - command:
+ - /testbin/timeout
+ - -s
+ - "9"
+ - 30m
+ - /testbin/puppet
+ - agent
+ - --onetime
+ - --no-daemonize
+ - --no-usecacheonfailure
+ - --no-splay
+ - --detailed-exitcodes
+ - --verbose
+ - --color
+ - "0"
+ - --no-noop
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+- id: puppet_agent_noop_true
+ input:
+ noop: true
+ output:
+ changed: false
+ run_command_calls:
+ - command: [/testbin/puppet, config, print, agent_disabled_lockfile]
+ environ: *env-def
+ rc: 0
+ out: "blah, anything"
+ err: ""
+ - command:
+ - /testbin/timeout
+ - -s
+ - "9"
+ - 30m
+ - /testbin/puppet
+ - agent
+ - --onetime
+ - --no-daemonize
+ - --no-usecacheonfailure
+ - --no-splay
+ - --detailed-exitcodes
+ - --verbose
+ - --color
+ - "0"
+ - --noop
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_redhat_subscription.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_redhat_subscription.py
index 4bf272916..9473d0d46 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_redhat_subscription.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_redhat_subscription.py
@@ -22,13 +22,15 @@ def patch_redhat_subscription(mocker):
"""
Function used for mocking some parts of redhat_subscription module
"""
- mocker.patch('ansible_collections.community.general.plugins.modules.redhat_subscription.RegistrationBase.REDHAT_REPO')
+ mocker.patch('ansible_collections.community.general.plugins.modules.redhat_subscription.Rhsm.REDHAT_REPO')
mocker.patch('ansible_collections.community.general.plugins.modules.redhat_subscription.isfile', return_value=False)
mocker.patch('ansible_collections.community.general.plugins.modules.redhat_subscription.unlink', return_value=True)
mocker.patch('ansible_collections.community.general.plugins.modules.redhat_subscription.AnsibleModule.get_bin_path',
return_value='/testbin/subscription-manager')
mocker.patch('ansible_collections.community.general.plugins.modules.redhat_subscription.Rhsm._can_connect_to_dbus',
return_value=False)
+ mocker.patch('ansible_collections.community.general.plugins.modules.redhat_subscription.Rhsm._has_dbus_interface',
+ return_value=False)
mocker.patch('ansible_collections.community.general.plugins.modules.redhat_subscription.getuid',
return_value=0)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_redis_info.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_redis_info.py
index 8b30a2316..cdc78680e 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_redis_info.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_redis_info.py
@@ -50,7 +50,12 @@ class TestRedisInfoModule(ModuleTestCase):
set_module_args({})
self.module.main()
self.assertEqual(redis_client.call_count, 1)
- self.assertEqual(redis_client.call_args, ({'host': 'localhost', 'port': 6379, 'password': None},))
+ self.assertEqual(redis_client.call_args, ({'host': 'localhost',
+ 'port': 6379,
+ 'password': None,
+ 'ssl': False,
+ 'ssl_ca_certs': None,
+ 'ssl_cert_reqs': 'required'},))
self.assertEqual(result.exception.args[0]['info']['redis_version'], '999.999.999')
def test_with_parameters(self):
@@ -64,7 +69,34 @@ class TestRedisInfoModule(ModuleTestCase):
})
self.module.main()
self.assertEqual(redis_client.call_count, 1)
- self.assertEqual(redis_client.call_args, ({'host': 'test', 'port': 1234, 'password': 'PASS'},))
+ self.assertEqual(redis_client.call_args, ({'host': 'test',
+ 'port': 1234,
+ 'password': 'PASS',
+ 'ssl': False,
+ 'ssl_ca_certs': None,
+ 'ssl_cert_reqs': 'required'},))
+ self.assertEqual(result.exception.args[0]['info']['redis_version'], '999.999.999')
+
+ def test_with_tls_parameters(self):
+ """Test with tls parameters"""
+ with self.patch_redis_client(side_effect=FakeRedisClient) as redis_client:
+ with self.assertRaises(AnsibleExitJson) as result:
+ set_module_args({
+ 'login_host': 'test',
+ 'login_port': 1234,
+ 'login_password': 'PASS',
+ 'tls': True,
+ 'ca_certs': '/etc/ssl/ca.pem',
+ 'validate_certs': False
+ })
+ self.module.main()
+ self.assertEqual(redis_client.call_count, 1)
+ self.assertEqual(redis_client.call_args, ({'host': 'test',
+ 'port': 1234,
+ 'password': 'PASS',
+ 'ssl': True,
+ 'ssl_ca_certs': '/etc/ssl/ca.pem',
+ 'ssl_cert_reqs': None},))
self.assertEqual(result.exception.args[0]['info']['redis_version'], '999.999.999')
def test_with_fail_client(self):
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_rhsm_release.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_rhsm_release.py
index c5696962b..e8b2db6fd 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_rhsm_release.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_rhsm_release.py
@@ -14,6 +14,8 @@ from ansible_collections.community.general.tests.unit.plugins.modules.utils impo
class RhsmRepositoryReleaseModuleTestCase(ModuleTestCase):
module = rhsm_release
+ SUBMAN_KWARGS = dict(check_rc=True, expand_user_and_vars=False)
+
def setUp(self):
super(RhsmRepositoryReleaseModuleTestCase, self).setUp()
@@ -63,8 +65,8 @@ class RhsmRepositoryReleaseModuleTestCase(ModuleTestCase):
self.assertTrue(result['changed'])
self.assertEqual('7.5', result['current_release'])
self.module_main_command.assert_has_calls([
- call('/testbin/subscription-manager release --show', check_rc=True),
- call('/testbin/subscription-manager release --set 7.5', check_rc=True),
+ call(['/testbin/subscription-manager', 'release', '--show'], **self.SUBMAN_KWARGS),
+ call(['/testbin/subscription-manager', 'release', '--set', '7.5'], **self.SUBMAN_KWARGS),
])
def test_release_set_idempotent(self):
@@ -81,7 +83,7 @@ class RhsmRepositoryReleaseModuleTestCase(ModuleTestCase):
self.assertFalse(result['changed'])
self.assertEqual('7.5', result['current_release'])
self.module_main_command.assert_has_calls([
- call('/testbin/subscription-manager release --show', check_rc=True),
+ call(['/testbin/subscription-manager', 'release', '--show'], **self.SUBMAN_KWARGS),
])
def test_release_unset(self):
@@ -100,8 +102,8 @@ class RhsmRepositoryReleaseModuleTestCase(ModuleTestCase):
self.assertTrue(result['changed'])
self.assertIsNone(result['current_release'])
self.module_main_command.assert_has_calls([
- call('/testbin/subscription-manager release --show', check_rc=True),
- call('/testbin/subscription-manager release --unset', check_rc=True),
+ call(['/testbin/subscription-manager', 'release', '--show'], **self.SUBMAN_KWARGS),
+ call(['/testbin/subscription-manager', 'release', '--unset'], **self.SUBMAN_KWARGS),
])
def test_release_unset_idempotent(self):
@@ -118,7 +120,7 @@ class RhsmRepositoryReleaseModuleTestCase(ModuleTestCase):
self.assertFalse(result['changed'])
self.assertIsNone(result['current_release'])
self.module_main_command.assert_has_calls([
- call('/testbin/subscription-manager release --show', check_rc=True),
+ call(['/testbin/subscription-manager', 'release', '--show'], **self.SUBMAN_KWARGS),
])
def test_release_insane(self):
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_rhsm_repository.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_rhsm_repository.py
new file mode 100644
index 000000000..e822c7e84
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_rhsm_repository.py
@@ -0,0 +1,833 @@
+# -*- coding: utf-8 -*-
+# Author: Pino Toscano (ptoscano@redhat.com)
+# Largely adapted from test_redhat_subscription by
+# Jiri Hnidek (jhnidek@redhat.com)
+#
+# Copyright (c) Pino Toscano (ptoscano@redhat.com)
+# Copyright (c) Jiri Hnidek (jhnidek@redhat.com)
+#
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import copy
+import fnmatch
+import itertools
+import json
+
+from ansible.module_utils import basic
+from ansible_collections.community.general.plugins.modules import rhsm_repository
+
+import pytest
+
+TESTED_MODULE = rhsm_repository.__name__
+
+
+@pytest.fixture
+def patch_rhsm_repository(mocker):
+ """
+ Fixture used for mocking some parts of the rhsm_repository module
+ """
+ mocker.patch('ansible_collections.community.general.plugins.modules.rhsm_repository.AnsibleModule.get_bin_path',
+ return_value='/testbin/subscription-manager')
+ mocker.patch('ansible_collections.community.general.plugins.modules.rhsm_repository.os.getuid',
+ return_value=0)
+
+
+class Repos(object):
+ """
+ Helper class to represent a list of repositories.
+
+ Each repository is a dict with a few properties: id, name, url, and enabled.
+ """
+
+ _SUBMAN_OUT_HEADER = """+----------------------------------------------------------+
+ Available Repositories in /etc/yum.repos.d/redhat.repo
++----------------------------------------------------------+
+"""
+ _SUBMAN_OUT_ENTRY = """Repo ID: %s
+Repo Name: %s
+Repo URL: %s
+Enabled: %s
+
+"""
+
+ def __init__(self, repos):
+ self.repos = repos
+
+ def to_subman_list_output(self):
+ """
+ Return a string mimicking the output of `subscription-manager repos --list`
+ """
+ out = self._SUBMAN_OUT_HEADER
+ for repo in self.repos:
+ out += self._SUBMAN_OUT_ENTRY % (
+ repo["id"],
+ repo["name"],
+ repo["url"],
+ "1" if repo["enabled"] else "0",
+ )
+
+ return out
+
+ def copy(self):
+ """
+ Clone the object; used to make changes (enable(), disable()) without
+ affecting the original object.
+ """
+ return copy.deepcopy(self)
+
+ def _set_status(self, repo_id, status):
+ for repo in self.repos:
+ if fnmatch.fnmatch(repo['id'], repo_id):
+ repo['enabled'] = status
+
+ def enable(self, repo_ids):
+ """
+ Enable the specified IDs.
+
+ 'repo_ids' can be either a single ID or a list of IDs; fnmatch-style
+ wildcards are allowed.
+
+ Returns the same object, so calls to this can be chained.
+ """
+ if not isinstance(repo_ids, list):
+ repo_ids = [repo_ids]
+ for repo_id in repo_ids:
+ self._set_status(repo_id, True)
+ return self
+
+ def disable(self, repo_ids):
+ """
+ Disable the specified IDs.
+
+ 'repo_ids' can be either a single ID or a list of IDs; fnmatch-style
+ wildcards are allowed.
+
+ Returns the same object, so calls to this can be chained.
+ """
+ if not isinstance(repo_ids, list):
+ repo_ids = [repo_ids]
+ for repo_id in repo_ids:
+ self._set_status(repo_id, False)
+ return self
+
+ def _filter_by_status(self, filter, status):
+ return [
+ repo['id']
+ for repo in self.repos
+ if repo['enabled'] == status and fnmatch.fnmatch(repo['id'], filter)
+ ]
+
+ def ids_enabled(self, filter='*'):
+ """
+ Get the IDs of the enabled repositories.
+
+ 'filter' is a wildcard expression.
+ """
+ return self._filter_by_status(filter, True)
+
+ def ids_disabled(self, filter='*'):
+ """
+ Get the IDs of the disabled repositories.
+
+ 'filter' is a wildcard expression.
+ """
+ return self._filter_by_status(filter, False)
+
+ def to_list(self):
+ """
+ Get the list of repositories.
+ """
+ return self.repos
+
+
+def flatten(iter_of_iters):
+ return list(itertools.chain.from_iterable(iter_of_iters))
+
+
+# A list of test repositories, taken directly from the Candlepin test data.
+REPOS_LIST = [
+ {
+ "id": "never-enabled-content-801",
+ "name": "never-enabled-content-801",
+ "url": "https://candlepin.local/foo/path/never_enabled/801-100",
+ "enabled": False,
+ },
+ {
+ "id": "never-enabled-content-100000000000060",
+ "name": "never-enabled-content-100000000000060",
+ "url": "https://candlepin.local/foo/path/never_enabled/100000000000060-100",
+ "enabled": False,
+ },
+ {
+ "id": "awesomeos-x86_64-1000000000000023",
+ "name": "awesomeos-x86_64-1000000000000023",
+ "url": "https://candlepin.local/path/to/awesomeos/x86_64/1000000000000023-11124",
+ "enabled": False,
+ },
+ {
+ "id": "awesomeos-ppc64-100000000000011",
+ "name": "awesomeos-ppc64-100000000000011",
+ "url": "https://candlepin.local/path/to/awesomeos/ppc64/100000000000011-11126",
+ "enabled": False,
+ },
+ {
+ "id": "awesomeos-99000",
+ "name": "awesomeos-99000",
+ "url": "https://candlepin.local/path/to/generic/awesomeos/99000-11113",
+ "enabled": True,
+ },
+ {
+ "id": "content-label-27060",
+ "name": "content-27060",
+ "url": "https://candlepin.local/foo/path/common/27060-1111",
+ "enabled": True,
+ },
+ {
+ "id": "content-label-no-gpg-32060",
+ "name": "content-nogpg-32060",
+ "url": "https://candlepin.local/foo/path/no_gpg/32060-234",
+ "enabled": False,
+ },
+ {
+ "id": "awesomeos-1000000000000023",
+ "name": "awesomeos-1000000000000023",
+ "url": "https://candlepin.local/path/to/generic/awesomeos/1000000000000023-11113",
+ "enabled": False,
+ },
+ {
+ "id": "awesomeos-x86-100000000000020",
+ "name": "awesomeos-x86-100000000000020",
+ "url": "https://candlepin.local/path/to/awesomeos/x86/100000000000020-11120",
+ "enabled": False,
+ },
+ {
+ "id": "awesomeos-x86_64-99000",
+ "name": "awesomeos-x86_64-99000",
+ "url": "https://candlepin.local/path/to/awesomeos/x86_64/99000-11124",
+ "enabled": True,
+ },
+ {
+ "id": "awesomeos-s390x-99000",
+ "name": "awesomeos-s390x-99000",
+ "url": "https://candlepin.local/path/to/awesomeos/s390x/99000-11121",
+ "enabled": False,
+ },
+ {
+ "id": "awesomeos-modifier-37080",
+ "name": "awesomeos-modifier-37080",
+ "url": "https://candlepin.local/example.com/awesomeos-modifier/37080-1112",
+ "enabled": False,
+ },
+ {
+ "id": "awesomeos-i686-99000",
+ "name": "awesomeos-i686-99000",
+ "url": "https://candlepin.local/path/to/awesomeos/i686/99000-11123",
+ "enabled": False,
+ },
+ {
+ "id": "fake-content-38072",
+ "name": "fake-content-38072",
+ "url": "https://candlepin.local/path/to/fake-content/38072-3902",
+ "enabled": True,
+ },
+]
+
+
+# A static object with the list of repositories, used as a reference to query
+# the repositories and to create (by copy()) new Repos objects.
+REPOS = Repos(REPOS_LIST)
+
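+# Illustrative only (not part of the test data): Repos calls can be chained and
+# IDs are matched with fnmatch-style wildcards, e.g.
+#
+#     subset = REPOS.copy().disable('*').enable('awesomeos-*')
+#     assert 'awesomeos-99000' in subset.ids_enabled()
+#     assert 'fake-content-38072' in subset.ids_disabled()
+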
+# The mock string for the output of `subscription-manager repos --list`.
+REPOS_LIST_OUTPUT = REPOS.to_subman_list_output()
+
+# MUST match what's in the Rhsm class in the module.
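+# These are the keyword arguments passed to run_command: environ_update forces
+# the C locale so the `repos --list` output parses predictably, and user/variable
+# expansion and unsafe-shell handling are disabled because the argv is a list.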
+SUBMAN_KWARGS = {
+ 'environ_update': dict(LANG='C', LC_ALL='C', LC_MESSAGES='C'),
+ 'expand_user_and_vars': False,
+ 'use_unsafe_shell': False,
+}
+
+
+TEST_CASES = [
+ # enable a disabled repository
+ [
+ {
+ 'name': 'awesomeos-1000000000000023',
+ },
+ {
+ 'id': 'test_enable_single',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--enable',
+ 'awesomeos-1000000000000023',
+ ],
+ SUBMAN_KWARGS,
+ (0, '', '')
+ ),
+ ],
+ 'changed': True,
+ 'repositories': REPOS.copy().enable('awesomeos-1000000000000023'),
+ }
+ ],
+ # enable an already enabled repository
+ [
+ {
+ 'name': 'fake-content-38072',
+ },
+ {
+ 'id': 'test_enable_already_enabled',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ ],
+ 'changed': False,
+ 'repositories': REPOS.copy(),
+ }
+ ],
+ # enable two disabled repositories
+ [
+ {
+ 'name': ['awesomeos-1000000000000023', 'content-label-no-gpg-32060'],
+ },
+ {
+ 'id': 'test_enable_multiple',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--enable',
+ 'awesomeos-1000000000000023',
+ '--enable',
+ 'content-label-no-gpg-32060',
+ ],
+ SUBMAN_KWARGS,
+ (0, '', '')
+ ),
+ ],
+ 'changed': True,
+ 'repositories': REPOS.copy().enable('awesomeos-1000000000000023').enable('content-label-no-gpg-32060'),
+ }
+ ],
+ # enable two repositories, one disabled and one already enabled
+ [
+ {
+ 'name': ['awesomeos-1000000000000023', 'fake-content-38072'],
+ },
+ {
+ 'id': 'test_enable_multiple_mixed',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--enable',
+ 'awesomeos-1000000000000023',
+ '--enable',
+ 'fake-content-38072',
+ ],
+ SUBMAN_KWARGS,
+ (0, '', '')
+ ),
+ ],
+ 'changed': True,
+ 'repositories': REPOS.copy().enable('awesomeos-1000000000000023'),
+ }
+ ],
+ # purge everything but never-enabled-content-801 (disabled)
+ [
+ {
+ 'name': 'never-enabled-content-801',
+ 'purge': True,
+ },
+ {
+ 'id': 'test_purge_everything_but_one_disabled',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--enable',
+ 'never-enabled-content-801',
+ ] + flatten([['--disable', i] for i in REPOS.ids_enabled() if i != 'never-enabled-content-801']),
+ SUBMAN_KWARGS,
+ (0, '', '')
+ ),
+ ],
+ 'changed': True,
+ 'repositories': REPOS.copy().disable('*').enable('never-enabled-content-801'),
+ }
+ ],
+ # purge everything but awesomeos-99000 (already enabled)
+ [
+ {
+ 'name': 'awesomeos-99000',
+ 'purge': True,
+ },
+ {
+ 'id': 'test_purge_everything_but_one_enabled',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--enable',
+ 'awesomeos-99000',
+ '--disable',
+ 'content-label-27060',
+ '--disable',
+ 'awesomeos-x86_64-99000',
+ '--disable',
+ 'fake-content-38072',
+ ],
+ SUBMAN_KWARGS,
+ (0, '', '')
+ ),
+ ],
+ 'changed': True,
+ 'repositories': REPOS.copy().disable('*').enable('awesomeos-99000'),
+ }
+ ],
+ # enable everything, then purge everything but content-label-27060
+ [
+ {
+ 'name': 'content-label-27060',
+ 'purge': True,
+ },
+ {
+ 'id': 'test_enable_everything_purge_everything_but_one_enabled',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS.copy().enable('*').to_subman_list_output(), '')
+ ),
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--enable',
+ 'content-label-27060',
+ '--disable',
+ 'never-enabled-content-801',
+ '--disable',
+ 'never-enabled-content-100000000000060',
+ '--disable',
+ 'awesomeos-x86_64-1000000000000023',
+ '--disable',
+ 'awesomeos-ppc64-100000000000011',
+ '--disable',
+ 'awesomeos-99000',
+ '--disable',
+ 'content-label-no-gpg-32060',
+ '--disable',
+ 'awesomeos-1000000000000023',
+ '--disable',
+ 'awesomeos-x86-100000000000020',
+ '--disable',
+ 'awesomeos-x86_64-99000',
+ '--disable',
+ 'awesomeos-s390x-99000',
+ '--disable',
+ 'awesomeos-modifier-37080',
+ '--disable',
+ 'awesomeos-i686-99000',
+ '--disable',
+ 'fake-content-38072',
+ ],
+ SUBMAN_KWARGS,
+ (0, '', '')
+ ),
+ ],
+ 'changed': True,
+ 'repositories': REPOS.copy().disable('*').enable('content-label-27060'),
+ }
+ ],
+ # enable all awesomeos-*
+ [
+ {
+ 'name': 'awesomeos-*',
+ },
+ {
+ 'id': 'test_enable_all_awesomeos_star',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--enable',
+ 'awesomeos-x86_64-1000000000000023',
+ '--enable',
+ 'awesomeos-ppc64-100000000000011',
+ '--enable',
+ 'awesomeos-99000',
+ '--enable',
+ 'awesomeos-1000000000000023',
+ '--enable',
+ 'awesomeos-x86-100000000000020',
+ '--enable',
+ 'awesomeos-x86_64-99000',
+ '--enable',
+ 'awesomeos-s390x-99000',
+ '--enable',
+ 'awesomeos-modifier-37080',
+ '--enable',
+ 'awesomeos-i686-99000',
+ ],
+ SUBMAN_KWARGS,
+ (0, '', '')
+ ),
+ ],
+ 'changed': True,
+ 'repositories': REPOS.copy().enable('awesomeos-*'),
+ }
+ ],
+ # purge everything but awesomeos-*
+ [
+ {
+ 'name': REPOS.ids_enabled('awesomeos-*'),
+ 'purge': True,
+ },
+ {
+ 'id': 'test_purge_everything_but_awesomeos_list',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--enable',
+ 'awesomeos-99000',
+ '--enable',
+ 'awesomeos-x86_64-99000',
+ '--disable',
+ 'content-label-27060',
+ '--disable',
+ 'fake-content-38072',
+ ],
+ SUBMAN_KWARGS,
+ (0, '', '')
+ ),
+ ],
+ 'changed': True,
+ 'repositories': REPOS.copy().disable('*').enable(REPOS.ids_enabled('awesomeos-*')),
+ }
+ ],
+ # enable a repository that does not exist
+ [
+ {
+ 'name': 'repo-that-does-not-exist',
+ },
+ {
+ 'id': 'test_enable_nonexisting',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ ],
+ 'failed': True,
+ 'msg': 'repo-that-does-not-exist is not a valid repository ID',
+ }
+ ],
+ # disable an enabled repository
+ [
+ {
+ 'name': 'awesomeos-99000',
+ 'state': 'disabled',
+ },
+ {
+ 'id': 'test_disable_single',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--disable',
+ 'awesomeos-99000',
+ ],
+ SUBMAN_KWARGS,
+ (0, '', '')
+ ),
+ ],
+ 'changed': True,
+ 'repositories': REPOS.copy().disable('awesomeos-99000'),
+ }
+ ],
+ # disable an enabled repository (using state=absent)
+ [
+ {
+ 'name': 'awesomeos-99000',
+ 'state': 'absent',
+ },
+ {
+ 'id': 'test_disable_single_using_absent',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--disable',
+ 'awesomeos-99000',
+ ],
+ SUBMAN_KWARGS,
+ (0, '', '')
+ ),
+ ],
+ 'changed': True,
+ 'repositories': REPOS.copy().disable('awesomeos-99000'),
+ }
+ ],
+ # disable an already disabled repository
+ [
+ {
+ 'name': 'never-enabled-content-801',
+ 'state': 'disabled',
+ },
+ {
+ 'id': 'test_disable_already_disabled',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ ],
+ 'changed': False,
+ 'repositories': REPOS.copy(),
+ }
+ ],
+ # disable an already disabled repository, and purge
+ [
+ {
+ 'name': 'never-enabled-content-801',
+ 'state': 'disabled',
+ 'purge': True,
+ },
+ {
+ 'id': 'test_disable_already_disabled_and_purge',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ ] + flatten([['--disable', i] for i in REPOS.ids_enabled()]),
+ SUBMAN_KWARGS,
+ (0, '', '')
+ ),
+ ],
+ 'changed': True,
+ 'repositories': REPOS.copy().disable('*'),
+ }
+ ],
+ # disable an enabled repository, and purge
+ [
+ {
+ 'name': 'awesomeos-99000',
+ 'state': 'disabled',
+ 'purge': True,
+ },
+ {
+ 'id': 'test_disable_single_and_purge',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ ] + flatten([['--disable', i] for i in REPOS.ids_enabled()]),
+ SUBMAN_KWARGS,
+ (0, '', '')
+ ),
+ ],
+ 'changed': True,
+ 'repositories': REPOS.copy().disable('*'),
+ }
+ ],
+ # disable a repository that does not exist
+ [
+ {
+ 'name': 'repo-that-does-not-exist',
+ 'state': 'disabled',
+ },
+ {
+ 'id': 'test_disable_nonexisting',
+ 'run_command.calls': [
+ (
+ [
+ '/testbin/subscription-manager',
+ 'repos',
+ '--list',
+ ],
+ SUBMAN_KWARGS,
+ (0, REPOS_LIST_OUTPUT, '')
+ ),
+ ],
+ 'failed': True,
+ 'msg': 'repo-that-does-not-exist is not a valid repository ID',
+ }
+ ],
+]
+
+
+TEST_CASES_IDS = [item[1]['id'] for item in TEST_CASES]
+
+
+@pytest.mark.parametrize('patch_ansible_module, testcase', TEST_CASES, ids=TEST_CASES_IDS, indirect=['patch_ansible_module'])
+@pytest.mark.usefixtures('patch_ansible_module')
+def test_rhsm_repository(mocker, capfd, patch_rhsm_repository, testcase):
+ """
+ Run unit tests for the test cases listed in TEST_CASES.
+ """
+
+ # First, mock the function used for running commands
+ call_results = [item[2] for item in testcase['run_command.calls']]
+ mock_run_command = mocker.patch.object(
+ basic.AnsibleModule,
+ 'run_command',
+ side_effect=call_results)
+
+ # Try to run test case
+ with pytest.raises(SystemExit):
+ rhsm_repository.main()
+
+ out, err = capfd.readouterr()
+ results = json.loads(out)
+
+ if 'failed' in testcase:
+ assert results['failed'] == testcase['failed']
+ assert results['msg'] == testcase['msg']
+ else:
+ assert 'changed' in results
+ assert results['changed'] == testcase['changed']
+ assert results['repositories'] == testcase['repositories'].to_list()
+
+ assert basic.AnsibleModule.run_command.call_count == len(testcase['run_command.calls'])
+ # FIXME: ideally we would also compare the actual calls with the expected
+ # ones; the problem is that the module uses a dict to collect the repositories
+ # to enable and disable, so the order of the --enable/--disable parameters
+ # passed to `subscription-manager repos` is not stable (one possible
+ # order-insensitive check is sketched below).
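+ # One way to make such a comparison order-insensitive (illustrative only, not
+ # used by these tests): keep the fixed argv prefix as-is and compare the
+ # remaining (flag, repo) pairs as unordered sets. Names below are hypothetical.
+ #
+ #     def normalize(argv):
+ #         prefix, flags = argv[:2], argv[2:]
+ #         if flags == ['--list']:
+ #             return (tuple(prefix), frozenset([('--list', None)]))
+ #         return (tuple(prefix), frozenset(zip(flags[::2], flags[1::2])))
+ #
+ #     expected = [normalize(argv) for argv, dummy_kwargs, dummy_result in testcase['run_command.calls']]
+ #     actual = [normalize(c[0][0]) for c in mock_run_command.call_args_list]
+ #     assert actual == expected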
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_sap_task_list_execute.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_sap_task_list_execute.py
deleted file mode 100644
index 34c97c4a8..000000000
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_sap_task_list_execute.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# Copyright (c) Ansible project
-# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
-# SPDX-License-Identifier: GPL-3.0-or-later
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import sys
-from ansible_collections.community.general.tests.unit.compat.mock import patch, MagicMock
-from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
-
-sys.modules['pyrfc'] = MagicMock()
-sys.modules['pyrfc.Connection'] = MagicMock()
-sys.modules['xmltodict'] = MagicMock()
-sys.modules['xmltodict.parse'] = MagicMock()
-
-from ansible_collections.community.general.plugins.modules import sap_task_list_execute
-
-
-class TestSAPRfcModule(ModuleTestCase):
-
- def setUp(self):
- super(TestSAPRfcModule, self).setUp()
- self.module = sap_task_list_execute
-
- def tearDown(self):
- super(TestSAPRfcModule, self).tearDown()
-
- def define_rfc_connect(self, mocker):
- return mocker.patch(self.module.call_rfc_method)
-
- def test_without_required_parameters(self):
- """Failure must occurs when all parameters are missing"""
- with self.assertRaises(AnsibleFailJson):
- set_module_args({})
- self.module.main()
-
- def test_error_no_task_list(self):
- """tests fail to exec task list"""
-
- set_module_args({
- "conn_username": "DDIC",
- "conn_password": "Test1234",
- "host": "10.1.8.9",
- "task_to_execute": "SAP_BASIS_SSL_CHECK"
- })
-
- with patch.object(self.module, 'Connection') as conn:
- conn.return_value = ''
- with self.assertRaises(AnsibleFailJson) as result:
- self.module.main()
- self.assertEqual(result.exception.args[0]['msg'], 'The task list does not exsist.')
-
- def test_success(self):
- """test execute task list success"""
-
- set_module_args({
- "conn_username": "DDIC",
- "conn_password": "Test1234",
- "host": "10.1.8.9",
- "task_to_execute": "SAP_BASIS_SSL_CHECK"
- })
- with patch.object(self.module, 'xml_to_dict') as XML:
- XML.return_value = {'item': [{'TASK': {'CHECK_STATUS_DESCR': 'Check successfully',
- 'STATUS_DESCR': 'Executed successfully', 'TASKNAME': 'CL_STCT_CHECK_SEC_CRYPTO',
- 'LNR': '1', 'DESCRIPTION': 'Check SAP Cryptographic Library', 'DOCU_EXIST': 'X',
- 'LOG_EXIST': 'X', 'ACTION_SKIP': None, 'ACTION_UNSKIP': None, 'ACTION_CONFIRM': None,
- 'ACTION_MAINTAIN': None}}]}
-
- with self.assertRaises(AnsibleExitJson) as result:
- sap_task_list_execute.main()
- self.assertEqual(result.exception.args[0]['out'], {'item': [{'TASK': {'CHECK_STATUS_DESCR': 'Check successfully',
- 'STATUS_DESCR': 'Executed successfully', 'TASKNAME': 'CL_STCT_CHECK_SEC_CRYPTO',
- 'LNR': '1', 'DESCRIPTION': 'Check SAP Cryptographic Library', 'DOCU_EXIST': 'X',
- 'LOG_EXIST': 'X', 'ACTION_SKIP': None, 'ACTION_UNSKIP': None,
- 'ACTION_CONFIRM': None, 'ACTION_MAINTAIN': None}}]})
-
- def test_success_no_log(self):
- """test execute task list success without logs"""
-
- set_module_args({
- "conn_username": "DDIC",
- "conn_password": "Test1234",
- "host": "10.1.8.9",
- "task_to_execute": "SAP_BASIS_SSL_CHECK"
- })
- with patch.object(self.module, 'xml_to_dict') as XML:
- XML.return_value = "No logs available."
- with self.assertRaises(AnsibleExitJson) as result:
- sap_task_list_execute.main()
- self.assertEqual(result.exception.args[0]['out'], 'No logs available.')
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_sapcar_extract.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_sapcar_extract.py
deleted file mode 100644
index bec9cf886..000000000
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_sapcar_extract.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright (c) 2021, Rainer Leber (@rainerleber) <rainerleber@gmail.com>
-# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
-# SPDX-License-Identifier: GPL-3.0-or-later
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-from ansible_collections.community.general.plugins.modules import sapcar_extract
-from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
-from ansible_collections.community.general.tests.unit.compat.mock import patch
-from ansible.module_utils import basic
-
-
-def get_bin_path(*args, **kwargs):
- """Function to return path of SAPCAR"""
- return "/tmp/sapcar"
-
-
-class Testsapcar_extract(ModuleTestCase):
- """Main class for testing sapcar_extract module."""
-
- def setUp(self):
- """Setup."""
- super(Testsapcar_extract, self).setUp()
- self.module = sapcar_extract
- self.mock_get_bin_path = patch.object(basic.AnsibleModule, 'get_bin_path', get_bin_path)
- self.mock_get_bin_path.start()
- self.addCleanup(self.mock_get_bin_path.stop) # ensure that the patching is 'undone'
-
- def tearDown(self):
- """Teardown."""
- super(Testsapcar_extract, self).tearDown()
-
- def test_without_required_parameters(self):
- """Failure must occurs when all parameters are missing."""
- with self.assertRaises(AnsibleFailJson):
- set_module_args({})
- self.module.main()
-
- def test_sapcar_extract(self):
- """Check that result is changed."""
- set_module_args({
- 'path': "/tmp/HANA_CLIENT_REV2_00_053_00_LINUX_X86_64.SAR",
- 'dest': "/tmp/test2",
- 'binary_path': "/tmp/sapcar"
- })
- with patch.object(basic.AnsibleModule, 'run_command') as run_command:
- run_command.return_value = 0, '', '' # successful execution, no output
- with self.assertRaises(AnsibleExitJson) as result:
- sapcar_extract.main()
- self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 1)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_simpleinit_msb.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_simpleinit_msb.py
new file mode 100644
index 000000000..d97e9b5f2
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_simpleinit_msb.py
@@ -0,0 +1,200 @@
+# Copyright (c) 2023 Vlad Glagolev <scm@vaygr.net>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+from ansible_collections.community.general.tests.unit.compat.mock import patch
+from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleFailJson, ModuleTestCase, set_module_args
+
+from ansible_collections.community.general.plugins.modules.simpleinit_msb import SimpleinitMSB, build_module
+
+
+_TELINIT_LIST = """
+RUNLEVEL SCRIPT
+2 smgl-suspend-single
+3 crond
+3 fuse
+3 network
+3 nscd
+3 smgl-default-remote-fs
+3 smgl-misc
+3 sshd
+DEV coldplug
+DEV devices
+DEV udevd
+S hostname.sh
+S hwclock.sh
+S keymap.sh
+S modutils
+S mountall.sh
+S mountroot.sh
+S single
+S smgl-default-crypt-fs
+S smgl-metalog
+S smgl-sysctl
+S sysstat
+"""
+
+_TELINIT_LIST_ENABLED = """
+smgl-suspend-single
+crond
+fuse
+network
+nscd
+smgl-default-remote-fs
+smgl-misc
+sshd
+coldplug
+devices
+udevd
+hostname.sh
+hwclock.sh
+keymap.sh
+modutils
+mountall.sh
+mountroot.sh
+single
+smgl-default-crypt-fs
+smgl-metalog
+smgl-sysctl
+"""
+
+_TELINIT_LIST_DISABLED = """
+sysstat
+"""
+
+_TELINIT_ALREADY_ENABLED = """
+Service smgl-suspend-single already enabled.
+"""
+
+_TELINIT_ALREADY_DISABLED = """
+Service smgl-suspend-single already disabled.
+"""
+
+_TELINIT_STATUS_RUNNING = """
+sshd is running with Process ID(s) 8510 8508 2195
+"""
+
+_TELINIT_STATUS_RUNNING_NOT = """
+/sbin/metalog is not running
+"""
+
+
+class TestSimpleinitMSB(ModuleTestCase):
+
+ def setUp(self):
+ super(TestSimpleinitMSB, self).setUp()
+
+ def tearDown(self):
+ super(TestSimpleinitMSB, self).tearDown()
+
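+ # build_module() is assumed to construct the AnsibleModule from the arguments
+ # set via set_module_args(); SimpleinitMSB then wraps that module object.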
+ def init_module(self, args):
+ set_module_args(args)
+
+ return SimpleinitMSB(build_module())
+
+ @patch('os.path.exists', return_value=True)
+ @patch('ansible.module_utils.basic.AnsibleModule.get_bin_path', return_value="/sbin/telinit")
+ def test_get_service_tools(self, *args, **kwargs):
+ simpleinit_msb = self.init_module({
+ 'name': 'smgl-suspend-single',
+ 'state': 'running',
+ })
+
+ simpleinit_msb.get_service_tools()
+
+ self.assertEqual(simpleinit_msb.telinit_cmd, "/sbin/telinit")
+
+ @patch('ansible_collections.community.general.plugins.modules.simpleinit_msb.SimpleinitMSB.execute_command')
+ def test_service_exists(self, execute_command):
+ simpleinit_msb = self.init_module({
+ 'name': 'smgl-suspend-single',
+ 'state': 'running',
+ })
+
+ execute_command.return_value = (0, _TELINIT_LIST, "")
+
+ simpleinit_msb.service_exists()
+
+ @patch('ansible_collections.community.general.plugins.modules.simpleinit_msb.SimpleinitMSB.execute_command')
+ def test_service_exists_not(self, execute_command):
+ simpleinit_msb = self.init_module({
+ 'name': 'ntp',
+ 'state': 'running',
+ })
+
+ execute_command.return_value = (0, _TELINIT_LIST, "")
+
+ with self.assertRaises(AnsibleFailJson) as context:
+ simpleinit_msb.service_exists()
+
+ self.assertEqual("telinit could not find the requested service: ntp", context.exception.args[0]["msg"])
+
+ @patch('ansible_collections.community.general.plugins.modules.simpleinit_msb.SimpleinitMSB.service_exists')
+ @patch('ansible_collections.community.general.plugins.modules.simpleinit_msb.SimpleinitMSB.execute_command')
+ def test_check_service_enabled(self, execute_command, service_exists):
+ simpleinit_msb = self.init_module({
+ 'name': 'nscd',
+ 'state': 'running',
+ 'enabled': 'true',
+ })
+
+ service_exists.return_value = True
+ execute_command.return_value = (0, _TELINIT_LIST_ENABLED, "")
+
+ self.assertTrue(simpleinit_msb.service_enabled())
+
+ # Race condition check
+ with patch('ansible_collections.community.general.plugins.modules.simpleinit_msb.SimpleinitMSB.service_enabled', return_value=False):
+ execute_command.return_value = (0, "", _TELINIT_ALREADY_ENABLED)
+
+ simpleinit_msb.service_enable()
+
+ self.assertFalse(simpleinit_msb.changed)
+
+ @patch('ansible_collections.community.general.plugins.modules.simpleinit_msb.SimpleinitMSB.service_exists')
+ @patch('ansible_collections.community.general.plugins.modules.simpleinit_msb.SimpleinitMSB.execute_command')
+ def test_check_service_disabled(self, execute_command, service_exists):
+ simpleinit_msb = self.init_module({
+ 'name': 'sysstat',
+ 'state': 'stopped',
+ 'enabled': 'false',
+ })
+
+ service_exists.return_value = True
+ execute_command.return_value = (0, _TELINIT_LIST_DISABLED, "")
+
+ self.assertFalse(simpleinit_msb.service_enabled())
+
+ # Race condition check
+ with patch('ansible_collections.community.general.plugins.modules.simpleinit_msb.SimpleinitMSB.service_enabled', return_value=True):
+ execute_command.return_value = (0, "", _TELINIT_ALREADY_DISABLED)
+
+ simpleinit_msb.service_enable()
+
+ self.assertFalse(simpleinit_msb.changed)
+
+ @patch('ansible_collections.community.general.plugins.modules.simpleinit_msb.SimpleinitMSB.service_control')
+ def test_check_service_running(self, service_control):
+ simpleinit_msb = self.init_module({
+ 'name': 'sshd',
+ 'state': 'running',
+ })
+
+ service_control.return_value = (0, _TELINIT_STATUS_RUNNING, "")
+
+ self.assertFalse(simpleinit_msb.get_service_status())
+
+ @patch('ansible_collections.community.general.plugins.modules.simpleinit_msb.SimpleinitMSB.service_control')
+ def test_check_service_running_not(self, service_control):
+ simpleinit_msb = self.init_module({
+ 'name': 'smgl-metalog',
+ 'state': 'running',
+ })
+
+ service_control.return_value = (0, _TELINIT_STATUS_RUNNING_NOT, "")
+
+ self.assertFalse(simpleinit_msb.get_service_status())
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_slack.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_slack.py
index ab4405baa..52ac9b7f3 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_slack.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_slack.py
@@ -105,7 +105,7 @@ class TestSlackModule(ModuleTestCase):
self.module.main()
self.assertTrue(fetch_url_mock.call_count, 1)
- self.assertEquals(fetch_url_mock.call_args[1]['url'], "https://slack.com/api/chat.postMessage")
+ self.assertEqual(fetch_url_mock.call_args[1]['url'], "https://slack.com/api/chat.postMessage")
def test_edit_message(self):
set_module_args({
@@ -125,9 +125,9 @@ class TestSlackModule(ModuleTestCase):
self.module.main()
self.assertTrue(fetch_url_mock.call_count, 2)
- self.assertEquals(fetch_url_mock.call_args[1]['url'], "https://slack.com/api/chat.update")
+ self.assertEqual(fetch_url_mock.call_args[1]['url'], "https://slack.com/api/chat.update")
call_data = json.loads(fetch_url_mock.call_args[1]['data'])
- self.assertEquals(call_data['ts'], "12345")
+ self.assertEqual(call_data['ts'], "12345")
def test_message_with_blocks(self):
"""tests sending a message with blocks"""
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_snap.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_snap.py
new file mode 100644
index 000000000..480f637b6
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_snap.py
@@ -0,0 +1,474 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from .helper import Helper, ModuleTestCase, RunCmdCall
+from ansible_collections.community.general.plugins.modules import snap
+
+
+issue_6803_status_out = """Name Version Rev Tracking Publisher Notes
+core20 20220826 1623 latest/stable canonical** base
+lxd 5.6-794016a 23680 latest/stable/… canonical** -
+snapd 2.57.4 17336 latest/stable canonical** snapd
+"""
+
+issue_6803_microk8s_out = (
+ "\rEnsure prerequisites for \"microk8s\" are available /"
+ "\rDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" "
+ "\rDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" \\"
+ "\rDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" "
+ "\rDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" /\u001b[?25"
+ "\r\u001b[7m\u001b[0mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 0% 0B/s ages"
+ "\r\u001b[7m\u001b[0mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 0% 0B/s ages"
+ "\r\u001b[7m\u001b[0mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 0% 0B/s ages"
+ "\r\u001b[7m\u001b[0mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 0% 880kB/s 3m21"
+ "\r\u001b[7m\u001b[0mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 1% 2.82MB/s 1m02"
+ "\r\u001b[7mD\u001b[0mownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 2% 4.71MB/s 37.0"
+ "\r\u001b[7mDo\u001b[0mwnload snap \"microk8s\" (5372) from channel \"1.27/stable\" 4% 9.09MB/s 18.8"
+ "\r\u001b[7mDown\u001b[0mload snap \"microk8s\" (5372) from channel \"1.27/stable\" 6% 12.4MB/s 13.5"
+ "\r\u001b[7mDownl\u001b[0moad snap \"microk8s\" (5372) from channel \"1.27/stable\" 7% 14.5MB/s 11.3"
+ "\r\u001b[7mDownloa\u001b[0md snap \"microk8s\" (5372) from channel \"1.27/stable\" 9% 15.9MB/s 10.1"
+ "\r\u001b[7mDownload \u001b[0msnap \"microk8s\" (5372) from channel \"1.27/stable\" 11% 18.0MB/s 8.75"
+ "\r\u001b[7mDownload s\u001b[0mnap \"microk8s\" (5372) from channel \"1.27/stable\" 13% 19.4MB/s 7.91"
+ "\r\u001b[7mDownload sn\u001b[0map \"microk8s\" (5372) from channel \"1.27/stable\" 15% 20.1MB/s 7.50"
+ "\r\u001b[7mDownload snap\u001b[0m \"microk8s\" (5372) from channel \"1.27/stable\" 17% 20.9MB/s 7.05"
+ "\r\u001b[7mDownload snap \"\u001b[0mmicrok8s\" (5372) from channel \"1.27/stable\" 19% 22.1MB/s 6.50"
+ "\r\u001b[7mDownload snap \"m\u001b[0microk8s\" (5372) from channel \"1.27/stable\" 21% 22.9MB/s 6.11"
+ "\r\u001b[7mDownload snap \"mic\u001b[0mrok8s\" (5372) from channel \"1.27/stable\" 23% 23.2MB/s 5.90"
+ "\r\u001b[7mDownload snap \"micr\u001b[0mok8s\" (5372) from channel \"1.27/stable\" 25% 23.9MB/s 5.58"
+ "\r\u001b[7mDownload snap \"microk\u001b[0m8s\" (5372) from channel \"1.27/stable\" 27% 24.5MB/s 5.30"
+ "\r\u001b[7mDownload snap \"microk8\u001b[0ms\" (5372) from channel \"1.27/stable\" 29% 24.9MB/s 5.09"
+ "\r\u001b[7mDownload snap \"microk8s\"\u001b[0m (5372) from channel \"1.27/stable\" 31% 25.4MB/s 4.85"
+ "\r\u001b[7mDownload snap \"microk8s\" (\u001b[0m5372) from channel \"1.27/stable\" 33% 25.8MB/s 4.63"
+ "\r\u001b[7mDownload snap \"microk8s\" (5\u001b[0m372) from channel \"1.27/stable\" 35% 26.2MB/s 4.42"
+ "\r\u001b[7mDownload snap \"microk8s\" (53\u001b[0m72) from channel \"1.27/stable\" 36% 26.3MB/s 4.30"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372\u001b[0m) from channel \"1.27/stable\" 38% 26.7MB/s 4.10"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) \u001b[0mfrom channel \"1.27/stable\" 40% 26.9MB/s 3.95"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) f\u001b[0mrom channel \"1.27/stable\" 42% 27.2MB/s 3.77"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) fro\u001b[0mm channel \"1.27/stable\" 44% 27.4MB/s 3.63"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from\u001b[0m channel \"1.27/stable\" 46% 27.8MB/s 3.44"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from c\u001b[0mhannel \"1.27/stable\" 48% 27.9MB/s 3.31"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from cha\u001b[0mnnel \"1.27/stable\" 50% 28.1MB/s 3.15"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from chan\u001b[0mnel \"1.27/stable\" 52% 28.3MB/s 3.02"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channe\u001b[0ml \"1.27/stable\" 54% 28.5MB/s 2.87"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel\u001b[0m \"1.27/stable\" 56% 28.6MB/s 2.75"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \u001b[0m\"1.27/stable\" 57% 28.7MB/s 2.63"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1\u001b[0m.27/stable\" 60% 28.9MB/s 2.47"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.2\u001b[0m7/stable\" 62% 29.0MB/s 2.35"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27\u001b[0m/stable\" 63% 29.1MB/s 2.23"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/s\u001b[0mtable\" 65% 29.2MB/s 2.10"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/st\u001b[0mable\" 67% 29.4MB/s 1.97"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stab\u001b[0mle\" 69% 29.5MB/s 1.85"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stabl\u001b[0me\" 71% 29.5MB/s 1.74"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\"\u001b[0m 73% 29.7MB/s 1.59"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" \u001b[0m 75% 29.8MB/s 1.48"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" \u001b[0m 77% 29.8MB/s 1.37"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 7\u001b[0m9% 29.9MB/s 1.26"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 81\u001b[0m% 30.0MB/s 1.14"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 83% \u001b[0m30.1MB/s 1.01"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 84% 3\u001b[0m0.1MB/s 919m"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 86% 30.\u001b[0m1MB/s 810m"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 88% 30.2\u001b[0mMB/s 676m"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 91% 30.3MB\u001b[0m/s 555m"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 93% 30.4MB/s\u001b[0m 436m"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 95% 30.5MB/s \u001b[0m317m"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 96% 30.5MB/s 21\u001b[0m1m"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 98% 30.5MB/s 117\u001b[0mm"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 100% 30.5MB/s 11m\u001b[0m"
+ "\r\u001b[7mDownload snap \"microk8s\" (5372) from channel \"1.27/stable\" 100% 30.0MB/s 0.0ns\u001b[0"
+ "\rFetch and check assertions for snap \"microk8s\" (5372) "
+ "\rMount snap \"microk8s\" (5372) \\"
+ "\rMount snap \"microk8s\" (5372) "
+ "\rMount snap \"microk8s\" (5372) "
+ "\rMount snap \"microk8s\" (5372) "
+ "\rSetup snap \"microk8s\" (5372) security profiles \\"
+ "\rSetup snap \"microk8s\" (5372) security profiles "
+ "\rSetup snap \"microk8s\" (5372) security profiles "
+ "\rSetup snap \"microk8s\" (5372) security profiles "
+ "\rSetup snap \"microk8s\" (5372) security profiles \\"
+ "\rSetup snap \"microk8s\" (5372) security profiles "
+ "\rSetup snap \"microk8s\" (5372) security profiles "
+ "\rSetup snap \"microk8s\" (5372) security profiles "
+ "\rSetup snap \"microk8s\" (5372) security profiles \\"
+ "\rSetup snap \"microk8s\" (5372) security profiles "
+ "\rSetup snap \"microk8s\" (5372) security profiles "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present \\"
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rRun install hook of \"microk8s\" snap if present "
+ "\rStart snap \"microk8s\" (5372) services \\"
+ "\rStart snap \"microk8s\" (5372) services "
+ "\rStart snap \"microk8s\" (5372) services "
+ "\rStart snap \"microk8s\" (5372) services "
+ "\rStart snap \"microk8s\" (5372) services \\"
+ "\rStart snap \"microk8s\" (5372) services "
+ "\rStart snap \"microk8s\" (5372) services "
+ "\rStart snap \"microk8s\" (5372) services "
+ "\rStart snap \"microk8s\" (5372) services \\"
+ "\rStart snap \"microk8s\" (5372) services "
+ "\rStart snap \"microk8s\" (5372) services "
+ "\rStart snap \"microk8s\" (5372) services "
+ "\rStart snap \"microk8s\" (5372) services \\"
+ "\rStart snap \"microk8s\" (5372) services "
+ "\rStart snap \"microk8s\" (5372) services "
+ "\rStart snap \"microk8s\" (5372) services "
+ "\rStart snap \"microk8s\" (5372) services \\"
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present \\"
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present \\"
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present \\"
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present \\"
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun configure hook of \"microk8s\" snap if present \\"
+ "\rRun configure hook of \"microk8s\" snap if present "
+ "\rRun service command \"restart\" for services [\"daemon-apiserver-proxy\"] of snap \""
+ "\r\u001b[0m\u001b[?25h\u001b[Kmicrok8s (1.27/stable) v1.27.2 from Canonical** installed\n"
+)
+
+issue_6803_kubectl_out = (
+ "\rEnsure prerequisites for \"kubectl\" are available /"
+ "\rDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" "
+ "\rDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" \\"
+ "\rDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" "
+ "\rDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" /\u001b[?25"
+ "\r\u001b[7m\u001b[0mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 0% 0B/s ages"
+ "\r\u001b[7m\u001b[0mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 0% 0B/s ages"
+ "\r\u001b[7m\u001b[0mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 0% 0B/s ages"
+ "\r\u001b[7m\u001b[0mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 0% 880kB/s 3m21"
+ "\r\u001b[7m\u001b[0mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 1% 2.82MB/s 1m02"
+ "\r\u001b[7mD\u001b[0mownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 2% 4.71MB/s 37.0"
+ "\r\u001b[7mDo\u001b[0mwnload snap \"kubectl\" (5372) from channel \"1.27/stable\" 4% 9.09MB/s 18.8"
+ "\r\u001b[7mDown\u001b[0mload snap \"kubectl\" (5372) from channel \"1.27/stable\" 6% 12.4MB/s 13.5"
+ "\r\u001b[7mDownl\u001b[0moad snap \"kubectl\" (5372) from channel \"1.27/stable\" 7% 14.5MB/s 11.3"
+ "\r\u001b[7mDownloa\u001b[0md snap \"kubectl\" (5372) from channel \"1.27/stable\" 9% 15.9MB/s 10.1"
+ "\r\u001b[7mDownload \u001b[0msnap \"kubectl\" (5372) from channel \"1.27/stable\" 11% 18.0MB/s 8.75"
+ "\r\u001b[7mDownload s\u001b[0mnap \"kubectl\" (5372) from channel \"1.27/stable\" 13% 19.4MB/s 7.91"
+ "\r\u001b[7mDownload sn\u001b[0map \"kubectl\" (5372) from channel \"1.27/stable\" 15% 20.1MB/s 7.50"
+ "\r\u001b[7mDownload snap\u001b[0m \"kubectl\" (5372) from channel \"1.27/stable\" 17% 20.9MB/s 7.05"
+ "\r\u001b[7mDownload snap \"\u001b[0mkubectl\" (5372) from channel \"1.27/stable\" 19% 22.1MB/s 6.50"
+ "\r\u001b[7mDownload snap \"m\u001b[0kubectl\" (5372) from channel \"1.27/stable\" 21% 22.9MB/s 6.11"
+ "\r\u001b[7mDownload snap \"mic\u001b[0mrok8s\" (5372) from channel \"1.27/stable\" 23% 23.2MB/s 5.90"
+ "\r\u001b[7mDownload snap \"micr\u001b[0mok8s\" (5372) from channel \"1.27/stable\" 25% 23.9MB/s 5.58"
+ "\r\u001b[7mDownload snap \"microk\u001b[0m8s\" (5372) from channel \"1.27/stable\" 27% 24.5MB/s 5.30"
+ "\r\u001b[7mDownload snap \"microk8\u001b[0ms\" (5372) from channel \"1.27/stable\" 29% 24.9MB/s 5.09"
+ "\r\u001b[7mDownload snap \"kubectl\"\u001b[0m (5372) from channel \"1.27/stable\" 31% 25.4MB/s 4.85"
+ "\r\u001b[7mDownload snap \"kubectl\" (\u001b[0m5372) from channel \"1.27/stable\" 33% 25.8MB/s 4.63"
+ "\r\u001b[7mDownload snap \"kubectl\" (5\u001b[0m372) from channel \"1.27/stable\" 35% 26.2MB/s 4.42"
+ "\r\u001b[7mDownload snap \"kubectl\" (53\u001b[0m72) from channel \"1.27/stable\" 36% 26.3MB/s 4.30"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372\u001b[0m) from channel \"1.27/stable\" 38% 26.7MB/s 4.10"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) \u001b[0mfrom channel \"1.27/stable\" 40% 26.9MB/s 3.95"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) f\u001b[0mrom channel \"1.27/stable\" 42% 27.2MB/s 3.77"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) fro\u001b[0mm channel \"1.27/stable\" 44% 27.4MB/s 3.63"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from\u001b[0m channel \"1.27/stable\" 46% 27.8MB/s 3.44"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from c\u001b[0mhannel \"1.27/stable\" 48% 27.9MB/s 3.31"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from cha\u001b[0mnnel \"1.27/stable\" 50% 28.1MB/s 3.15"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from chan\u001b[0mnel \"1.27/stable\" 52% 28.3MB/s 3.02"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channe\u001b[0ml \"1.27/stable\" 54% 28.5MB/s 2.87"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel\u001b[0m \"1.27/stable\" 56% 28.6MB/s 2.75"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \u001b[0m\"1.27/stable\" 57% 28.7MB/s 2.63"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1\u001b[0m.27/stable\" 60% 28.9MB/s 2.47"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.2\u001b[0m7/stable\" 62% 29.0MB/s 2.35"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27\u001b[0m/stable\" 63% 29.1MB/s 2.23"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/s\u001b[0mtable\" 65% 29.2MB/s 2.10"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/st\u001b[0mable\" 67% 29.4MB/s 1.97"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stab\u001b[0mle\" 69% 29.5MB/s 1.85"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stabl\u001b[0me\" 71% 29.5MB/s 1.74"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\"\u001b[0m 73% 29.7MB/s 1.59"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" \u001b[0m 75% 29.8MB/s 1.48"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" \u001b[0m 77% 29.8MB/s 1.37"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 7\u001b[0m9% 29.9MB/s 1.26"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 81\u001b[0m% 30.0MB/s 1.14"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 83% \u001b[0m30.1MB/s 1.01"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 84% 3\u001b[0m0.1MB/s 919m"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 86% 30.\u001b[0m1MB/s 810m"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 88% 30.2\u001b[0mMB/s 676m"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 91% 30.3MB\u001b[0m/s 555m"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 93% 30.4MB/s\u001b[0m 436m"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 95% 30.5MB/s \u001b[0m317m"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 96% 30.5MB/s 21\u001b[0m1m"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 98% 30.5MB/s 117\u001b[0mm"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 100% 30.5MB/s 11m\u001b[0m"
+ "\r\u001b[7mDownload snap \"kubectl\" (5372) from channel \"1.27/stable\" 100% 30.0MB/s 0.0ns\u001b[0"
+ "\rFetch and check assertions for snap \"kubectl\" (5372) "
+ "\rMount snap \"kubectl\" (5372) \\"
+ "\rMount snap \"kubectl\" (5372) "
+ "\rMount snap \"kubectl\" (5372) "
+ "\rMount snap \"kubectl\" (5372) "
+ "\rSetup snap \"kubectl\" (5372) security profiles \\"
+ "\rSetup snap \"kubectl\" (5372) security profiles "
+ "\rSetup snap \"kubectl\" (5372) security profiles "
+ "\rSetup snap \"kubectl\" (5372) security profiles "
+ "\rSetup snap \"kubectl\" (5372) security profiles \\"
+ "\rSetup snap \"kubectl\" (5372) security profiles "
+ "\rSetup snap \"kubectl\" (5372) security profiles "
+ "\rSetup snap \"kubectl\" (5372) security profiles "
+ "\rSetup snap \"kubectl\" (5372) security profiles \\"
+ "\rSetup snap \"kubectl\" (5372) security profiles "
+ "\rSetup snap \"kubectl\" (5372) security profiles "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present \\"
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rRun install hook of \"kubectl\" snap if present "
+ "\rStart snap \"kubectl\" (5372) services \\"
+ "\rStart snap \"kubectl\" (5372) services "
+ "\rStart snap \"kubectl\" (5372) services "
+ "\rStart snap \"kubectl\" (5372) services "
+ "\rStart snap \"kubectl\" (5372) services \\"
+ "\rStart snap \"kubectl\" (5372) services "
+ "\rStart snap \"kubectl\" (5372) services "
+ "\rStart snap \"kubectl\" (5372) services "
+ "\rStart snap \"kubectl\" (5372) services \\"
+ "\rStart snap \"kubectl\" (5372) services "
+ "\rStart snap \"kubectl\" (5372) services "
+ "\rStart snap \"kubectl\" (5372) services "
+ "\rStart snap \"kubectl\" (5372) services \\"
+ "\rStart snap \"kubectl\" (5372) services "
+ "\rStart snap \"kubectl\" (5372) services "
+ "\rStart snap \"kubectl\" (5372) services "
+ "\rStart snap \"kubectl\" (5372) services \\"
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present \\"
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present \\"
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present \\"
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present \\"
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun configure hook of \"kubectl\" snap if present \\"
+ "\rRun configure hook of \"kubectl\" snap if present "
+ "\rRun service command \"restart\" for services [\"daemon-apiserver-proxy\"] of snap \""
+ "\r\u001b[0m\u001b[?25h\u001b[Kkubectl (1.27/stable) v1.27.2 from Canonical** installed\n"
+)
+
+TEST_CASES = [
+ ModuleTestCase(
+ id="simple case",
+ input={"name": ["hello-world"]},
+ output=dict(changed=True, snaps_installed=["hello-world"]),
+ flags={},
+ run_command_calls=[
+ RunCmdCall(
+ command=['/testbin/snap', 'info', 'hello-world'],
+ environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
+ rc=0,
+ out='name: hello-world\n',
+ err="",
+ ),
+ RunCmdCall(
+ command=['/testbin/snap', 'list'],
+ environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
+ rc=0,
+ out="",
+ err="",
+ ),
+ RunCmdCall(
+ command=['/testbin/snap', 'install', 'hello-world'],
+ environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
+ rc=0,
+ out="hello-world (12345/stable) v12345 from Canonical** installed\n",
+ err="",
+ ),
+ RunCmdCall(
+ command=['/testbin/snap', 'list'],
+ environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
+ rc=0,
+ out=(
+ "Name Version Rev Tracking Publisher Notes"
+ "core20 20220826 1623 latest/stable canonical** base"
+ "lxd 5.6-794016a 23680 latest/stable/… canonical** -"
+ "hello-world 5.6-794016a 23680 latest/stable/… canonical** -"
+ "snapd 2.57.4 17336 latest/stable canonical** snapd"
+ ""),
+ err="",
+ ),
+ ]
+ ),
+ ModuleTestCase(
+ id="issue_6803",
+ input={"name": ["microk8s", "kubectl"], "classic": True},
+ output=dict(changed=True, snaps_installed=["microk8s", "kubectl"]),
+ flags={},
+ run_command_calls=[
+ RunCmdCall(
+ command=['/testbin/snap', 'info', 'microk8s', 'kubectl'],
+ environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
+ rc=0,
+ out='name: microk8s\n---\nname: kubectl\n',
+ err="",
+ ),
+ RunCmdCall(
+ command=['/testbin/snap', 'list'],
+ environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
+ rc=0,
+ out=issue_6803_status_out,
+ err="",
+ ),
+ RunCmdCall(
+ command=['/testbin/snap', 'install', '--classic', 'microk8s'],
+ environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
+ rc=0,
+ out=issue_6803_microk8s_out,
+ err="",
+ ),
+ RunCmdCall(
+ command=['/testbin/snap', 'install', '--classic', 'kubectl'],
+ environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
+ rc=0,
+ out=issue_6803_kubectl_out,
+ err="",
+ ),
+ RunCmdCall(
+ command=['/testbin/snap', 'list'],
+ environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
+ rc=0,
+ out=(
+ "Name Version Rev Tracking Publisher Notes"
+ "core20 20220826 1623 latest/stable canonical** base"
+ "lxd 5.6-794016a 23680 latest/stable/… canonical** -"
+ "microk8s 5.6-794016a 23680 latest/stable/… canonical** -"
+ "kubectl 5.6-794016a 23680 latest/stable/… canonical** -"
+ "snapd 2.57.4 17336 latest/stable canonical** snapd"
+ ""),
+ err="",
+ ),
+ ]
+ ),
+]
+
+helper = Helper.from_list(snap.main, TEST_CASES)
+patch_bin = helper.cmd_fixture
+test_module = helper.test_module
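
For orientation, the declarative ModuleTestCase/RunCmdCall fixtures above boil down to replaying canned run_command results in order. A minimal sketch of that idea follows; the names and the replay function are illustrative assumptions, not the collection's actual helper.py API.

# Illustrative sketch only: replay recorded run_command calls in order,
# asserting each invocation matches what the test case declared.
from collections import namedtuple

RunCmdCall = namedtuple("RunCmdCall", ["command", "environ", "rc", "out", "err"])


def make_run_command_stub(expected_calls):
    """Return a stand-in for AnsibleModule.run_command driven by expected_calls."""
    remaining = list(expected_calls)

    def run_command(args, **kwargs):
        assert remaining, "unexpected extra run_command call: %r" % (args,)
        call = remaining.pop(0)
        assert args == call.command
        assert kwargs == call.environ
        return call.rc, call.out, call.err

    return run_command
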
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_usb_facts.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_usb_facts.py
new file mode 100644
index 000000000..084433492
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_usb_facts.py
@@ -0,0 +1,105 @@
+# Copyright (c) Ansible project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import json
+
+from ansible_collections.community.general.tests.unit.compat import mock
+from ansible_collections.community.general.tests.unit.compat import unittest
+from ansible.module_utils import basic
+from ansible.module_utils.common.text.converters import to_bytes
+from ansible_collections.community.general.plugins.modules import usb_facts
+
+
+def set_module_args(args):
+ """prepare arguments so that they will be picked up during module creation"""
+ args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
+ basic._ANSIBLE_ARGS = to_bytes(args)
+
+
+class AnsibleExitJson(Exception):
+ """Exception class to be raised by module.exit_json and caught by the test case"""
+ pass
+
+
+class AnsibleFailJson(Exception):
+ """Exception class to be raised by module.fail_json and caught by the test case"""
+ pass
+
+
+def exit_json(*args, **kwargs):
+ """function to patch over exit_json; package return data into an exception"""
+ if 'changed' not in kwargs:
+ kwargs['changed'] = False
+ raise AnsibleExitJson(kwargs)
+
+
+def fail_json(*args, **kwargs):
+ """function to patch over fail_json; package return data into an exception"""
+ kwargs['failed'] = True
+ raise AnsibleFailJson(kwargs)
+
+
+def get_bin_path(self, arg, required=False):
+ """Mock AnsibleModule.get_bin_path"""
+ if arg == 'lsusb':
+ return '/usr/bin/lsusb'
+ else:
+ if required:
+ fail_json(msg='%r not found !' % arg)
+
+
+class TestUsbFacts(unittest.TestCase):
+
+ def setUp(self):
+ self.mock_module_helper = mock.patch.multiple(basic.AnsibleModule,
+ exit_json=exit_json,
+ fail_json=fail_json,
+ get_bin_path=get_bin_path)
+ self.mock_module_helper.start()
+ self.addCleanup(self.mock_module_helper.stop)
+ self.testing_data = [
+ {
+ "input": "Bus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub",
+ "bus": "001",
+ "device": "001",
+ "id": "1d6b:0002",
+ "name": "Linux Foundation 2.0 root hub"
+ },
+ {
+ "input": "Bus 003 Device 002: ID 8087:8008 Intel Corp. Integrated Rate Matching Hub",
+ "bus": "003",
+ "device": "002",
+ "id": "8087:8008",
+ "name": "Intel Corp. Integrated Rate Matching Hub"
+ }
+ ]
+ self.output_fields = ["bus", "device", "id", "name"]
+
+ def test_parsing_single_line(self):
+ for data in self.testing_data:
+ with mock.patch.object(basic.AnsibleModule, 'run_command') as mock_run_command:
+ command_output = data["input"]
+ mock_run_command.return_value = 0, command_output, None
+ with self.assertRaises(AnsibleExitJson) as result:
+ set_module_args({})
+ usb_facts.main()
+ for output_field in self.output_fields:
+ self.assertEqual(result.exception.args[0]["ansible_facts"]["usb_devices"][0][output_field], data[output_field])
+
+ def test_parsing_multiple_lines(self):
+ input = ""
+ for data in self.testing_data:
+ input += ("%s\n" % data["input"])
+ with mock.patch.object(basic.AnsibleModule, 'run_command') as mock_run_command:
+ mock_run_command.return_value = 0, input, None
+ with self.assertRaises(AnsibleExitJson) as result:
+ set_module_args({})
+ usb_facts.main()
+ for index in range(0, len(self.testing_data)):
+ for output_field in self.output_fields:
+ self.assertEqual(result.exception.args[0]["ansible_facts"]["usb_devices"][index][output_field],
+ self.testing_data[index][output_field])
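
The fixture lines above pin down the lsusb output format the module has to handle. As a rough illustration (the real usb_facts parser may be implemented differently), the asserted bus/device/id/name fields can be pulled out with a single regular expression:

# Illustrative parser sketch for lines such as
# "Bus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub".
import re

LSUSB_LINE = re.compile(
    r"^Bus (?P<bus>\d{3}) Device (?P<device>\d{3}): "
    r"ID (?P<id>[0-9a-fA-F]{4}:[0-9a-fA-F]{4}) (?P<name>.+)$"
)


def parse_lsusb(stdout):
    """Return a list of dicts with bus, device, id and name keys."""
    return [
        m.groupdict()
        for m in (LSUSB_LINE.match(line.strip()) for line in stdout.splitlines())
        if m
    ]
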
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf.py
index c979fd8d2..fbc2dae5f 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf.py
@@ -12,301 +12,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import json
from ansible_collections.community.general.plugins.modules import xfconf
+from .helper import Helper
-import pytest
-TESTED_MODULE = xfconf.__name__
-
-
-@pytest.fixture
-def patch_xfconf(mocker):
- """
- Function used for mocking some parts of redhat_subscription module
- """
- mocker.patch('ansible_collections.community.general.plugins.module_utils.mh.module_helper.AnsibleModule.get_bin_path',
- return_value='/testbin/xfconf-query')
-
-
-@pytest.mark.parametrize('patch_ansible_module', [{}], indirect=['patch_ansible_module'])
-@pytest.mark.usefixtures('patch_ansible_module')
-def test_without_required_parameters(capfd, patch_xfconf):
- """
- Failure must occurs when all parameters are missing
- """
- with pytest.raises(SystemExit):
- xfconf.main()
- out, err = capfd.readouterr()
- results = json.loads(out)
- assert results['failed']
- assert 'missing required arguments' in results['msg']
-
-
-TEST_CASES = [
- [
- {
- 'channel': 'xfwm4',
- 'property': '/general/inactive_opacity',
- 'state': 'present',
- 'value_type': 'int',
- 'value': 90,
- },
- {
- 'id': 'test_property_set_property',
- 'run_command.calls': [
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfwm4', '--property', '/general/inactive_opacity'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- # Mock of returned code, stdout and stderr
- (0, '100\n', '',),
- ),
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfwm4', '--property', '/general/inactive_opacity',
- '--create', '--type', 'int', '--set', '90'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- # Mock of returned code, stdout and stderr
- (0, '', '',),
- ),
- ],
- 'changed': True,
- 'previous_value': '100',
- 'value_type': 'int',
- 'value': '90',
- },
- ],
- [
- {
- 'channel': 'xfwm4',
- 'property': '/general/inactive_opacity',
- 'state': 'present',
- 'value_type': 'int',
- 'value': 90,
- },
- {
- 'id': 'test_property_set_property_same_value',
- 'run_command.calls': [
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfwm4', '--property', '/general/inactive_opacity'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- # Mock of returned code, stdout and stderr
- (0, '90\n', '',),
- ),
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfwm4', '--property', '/general/inactive_opacity',
- '--create', '--type', 'int', '--set', '90'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- # Mock of returned code, stdout and stderr
- (0, '', '',),
- ),
- ],
- 'changed': False,
- 'previous_value': '90',
- 'value_type': 'int',
- 'value': '90',
- },
- ],
- [
- {
- 'channel': 'xfce4-session',
- 'property': '/general/SaveOnExit',
- 'state': 'present',
- 'value_type': 'bool',
- 'value': False,
- },
- {
- 'id': 'test_property_set_property_bool_false',
- 'run_command.calls': [
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfce4-session', '--property', '/general/SaveOnExit'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- # Mock of returned code, stdout and stderr
- (0, 'true\n', '',),
- ),
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfce4-session', '--property', '/general/SaveOnExit',
- '--create', '--type', 'bool', '--set', 'false'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- # Mock of returned code, stdout and stderr
- (0, 'false\n', '',),
- ),
- ],
- 'changed': True,
- 'previous_value': 'true',
- 'value_type': 'bool',
- 'value': 'False',
- },
- ],
- [
- {
- 'channel': 'xfwm4',
- 'property': '/general/workspace_names',
- 'state': 'present',
- 'value_type': 'string',
- 'value': ['A', 'B', 'C'],
- },
- {
- 'id': 'test_property_set_array',
- 'run_command.calls': [
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfwm4', '--property', '/general/workspace_names'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- # Mock of returned code, stdout and stderr
- (0, 'Value is an array with 3 items:\n\nMain\nWork\nTmp\n', '',),
- ),
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfwm4', '--property', '/general/workspace_names',
- '--create', '--force-array', '--type', 'string', '--set', 'A', '--type', 'string', '--set', 'B',
- '--type', 'string', '--set', 'C'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- # Mock of returned code, stdout and stderr
- (0, '', '',),
- ),
- ],
- 'changed': True,
- 'previous_value': ['Main', 'Work', 'Tmp'],
- 'value_type': ['str', 'str', 'str'],
- 'value': ['A', 'B', 'C'],
- },
- ],
- [
- {
- 'channel': 'xfwm4',
- 'property': '/general/workspace_names',
- 'state': 'present',
- 'value_type': 'string',
- 'value': ['A', 'B', 'C'],
- },
- {
- 'id': 'test_property_set_array_to_same_value',
- 'run_command.calls': [
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfwm4', '--property', '/general/workspace_names'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- # Mock of returned code, stdout and stderr
- (0, 'Value is an array with 3 items:\n\nA\nB\nC\n', '',),
- ),
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfwm4', '--property', '/general/workspace_names',
- '--create', '--force-array', '--type', 'string', '--set', 'A', '--type', 'string', '--set', 'B',
- '--type', 'string', '--set', 'C'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- # Mock of returned code, stdout and stderr
- (0, '', '',),
- ),
- ],
- 'changed': False,
- 'previous_value': ['A', 'B', 'C'],
- 'value_type': ['str', 'str', 'str'],
- 'value': ['A', 'B', 'C'],
- },
- ],
- [
- {
- 'channel': 'xfwm4',
- 'property': '/general/workspace_names',
- 'state': 'absent',
- },
- {
- 'id': 'test_property_reset_value',
- 'run_command.calls': [
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfwm4', '--property', '/general/workspace_names'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- # Mock of returned code, stdout and stderr
- (0, 'Value is an array with 3 items:\n\nA\nB\nC\n', '',),
- ),
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfwm4', '--property', '/general/workspace_names',
- '--reset'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False},
- # Mock of returned code, stdout and stderr
- (0, '', '',),
- ),
- ],
- 'changed': True,
- 'previous_value': ['A', 'B', 'C'],
- 'value_type': None,
- 'value': None,
- },
- ],
-]
-TEST_CASES_IDS = [item[1]['id'] for item in TEST_CASES]
-
-
-@pytest.mark.parametrize('patch_ansible_module, testcase',
- TEST_CASES,
- ids=TEST_CASES_IDS,
- indirect=['patch_ansible_module'])
-@pytest.mark.usefixtures('patch_ansible_module')
-def test_xfconf(mocker, capfd, patch_xfconf, testcase):
- """
- Run unit tests for test cases listen in TEST_CASES
- """
-
- # Mock function used for running commands first
- call_results = [item[2] for item in testcase['run_command.calls']]
- mock_run_command = mocker.patch(
- 'ansible.module_utils.basic.AnsibleModule.run_command',
- side_effect=call_results)
-
- # Try to run test case
- with pytest.raises(SystemExit):
- xfconf.main()
-
- out, err = capfd.readouterr()
- results = json.loads(out)
- print("testcase =\n%s" % testcase)
- print("results =\n%s" % results)
-
- assert 'changed' in results
- assert results['changed'] == testcase['changed']
-
- for test_result in ('channel', 'property'):
- assert test_result in results, "'{0}' not found in {1}".format(test_result, results)
- assert results[test_result] == results['invocation']['module_args'][test_result], \
- "'{0}': '{1}' != '{2}'".format(test_result, results[test_result], results['invocation']['module_args'][test_result])
-
- assert mock_run_command.call_count == len(testcase['run_command.calls'])
- if mock_run_command.call_count:
- call_args_list = [(item[0][0], item[1]) for item in mock_run_command.call_args_list]
- expected_call_args_list = [(item[0], item[1]) for item in testcase['run_command.calls']]
- print("call args list =\n%s" % call_args_list)
- print("expected args list =\n%s" % expected_call_args_list)
- assert call_args_list == expected_call_args_list
-
- expected_cmd, dummy, expected_res = testcase['run_command.calls'][-1]
- assert results['cmd'] == expected_cmd
- assert results['stdout'] == expected_res[1]
- assert results['stderr'] == expected_res[2]
-
- for conditional_test_result in ('msg', 'value', 'previous_value'):
- if conditional_test_result in testcase:
- assert conditional_test_result in results, "'{0}' not found in {1}".format(conditional_test_result, results)
- assert results[conditional_test_result] == testcase[conditional_test_result], \
- "'{0}': '{1}' != '{2}'".format(conditional_test_result, results[conditional_test_result], testcase[conditional_test_result])
+Helper.from_module(xfconf, __name__)
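
Helper.from_module(xfconf, __name__) replaces the inline TEST_CASES table with the sibling test_xfconf.yaml added below. A hedged sketch of the general pattern (loading the YAML file that shares the test module's name), using assumed helper names rather than the collection's real implementation:

# Sketch with assumed names: locate and load the YAML test cases that sit
# next to a test module, e.g. test_xfconf.py -> test_xfconf.yaml.
import os

import yaml


def load_sibling_yaml_cases(test_file_path):
    """Load <test module name>.yaml from the same directory as the test module."""
    yaml_path = os.path.splitext(os.path.abspath(test_file_path))[0] + ".yaml"
    with open(yaml_path) as fh:
        return yaml.safe_load(fh)
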
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf.yaml b/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf.yaml
new file mode 100644
index 000000000..908154df2
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf.yaml
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+---
+- id: test_missing_input
+ input: {}
+ output:
+ failed: true
+ msg: "missing required arguments: channel, property"
+- id: test_property_set_property
+ input:
+ channel: xfwm4
+ property: /general/inactive_opacity
+ state: present
+ value_type: int
+ value: 90
+ output:
+ changed: true
+ previous_value: '100'
+ type: int
+ value: '90'
+ run_command_calls:
+ - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity]
+ environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: false}
+ rc: 0
+ out: "100\n"
+ err: ""
+ - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity, --create, --type, int, --set, '90']
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+- id: test_property_set_property_same_value
+ input:
+ channel: xfwm4
+ property: /general/inactive_opacity
+ state: present
+ value_type: int
+ value: 90
+ output:
+ changed: false
+ previous_value: '90'
+ type: int
+ value: '90'
+ run_command_calls:
+ - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity]
+ environ: *env-def
+ rc: 0
+ out: "90\n"
+ err: ""
+ - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity, --create, --type, int, --set, '90']
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+- id: test_property_set_property_bool_false
+ input:
+ channel: xfce4-session
+ property: /general/SaveOnExit
+ state: present
+ value_type: bool
+ value: False
+ output:
+ changed: true
+ previous_value: 'true'
+ type: bool
+ value: 'False'
+ run_command_calls:
+ - command: [/testbin/xfconf-query, --channel, xfce4-session, --property, /general/SaveOnExit]
+ environ: *env-def
+ rc: 0
+ out: "true\n"
+ err: ""
+ - command: [/testbin/xfconf-query, --channel, xfce4-session, --property, /general/SaveOnExit, --create, --type, bool, --set, 'false']
+ environ: *env-def
+ rc: 0
+ out: "false\n"
+ err: ""
+- id: test_property_set_array
+ input:
+ channel: xfwm4
+ property: /general/workspace_names
+ state: present
+ value_type: string
+ value: [A, B, C]
+ output:
+ changed: true
+ previous_value: [Main, Work, Tmp]
+ type: [string, string, string]
+ value: [A, B, C]
+ run_command_calls:
+ - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names]
+ environ: *env-def
+ rc: 0
+ out: "Value is an array with 3 items:\n\nMain\nWork\nTmp\n"
+ err: ""
+ - command:
+ - /testbin/xfconf-query
+ - --channel
+ - xfwm4
+ - --property
+ - /general/workspace_names
+ - --create
+ - --force-array
+ - --type
+ - string
+ - --set
+ - A
+ - --type
+ - string
+ - --set
+ - B
+ - --type
+ - string
+ - --set
+ - C
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+- id: test_property_set_array_to_same_value
+ input:
+ channel: xfwm4
+ property: /general/workspace_names
+ state: present
+ value_type: string
+ value: [A, B, C]
+ output:
+ changed: false
+ previous_value: [A, B, C]
+ type: [string, string, string]
+ value: [A, B, C]
+ run_command_calls:
+ - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names]
+ environ: *env-def
+ rc: 0
+ out: "Value is an array with 3 items:\n\nA\nB\nC\n"
+ err: ""
+ - command:
+ - /testbin/xfconf-query
+ - --channel
+ - xfwm4
+ - --property
+ - /general/workspace_names
+ - --create
+ - --force-array
+ - --type
+ - string
+ - --set
+ - A
+ - --type
+ - string
+ - --set
+ - B
+ - --type
+ - string
+ - --set
+ - C
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
+- id: test_property_reset_value
+ input:
+ channel: xfwm4
+ property: /general/workspace_names
+ state: absent
+ output:
+ changed: true
+ previous_value: [A, B, C]
+ type: null
+ value: null
+ run_command_calls:
+ - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names]
+ environ: *env-def
+ rc: 0
+ out: "Value is an array with 3 items:\n\nA\nB\nC\n"
+ err: ""
+ - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names, --reset]
+ environ: *env-def
+ rc: 0
+ out: ""
+ err: ""
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf_info.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf_info.py
index dfcd4f33a..67c63dda0 100644
--- a/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf_info.py
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf_info.py
@@ -5,168 +5,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import json
from ansible_collections.community.general.plugins.modules import xfconf_info
+from .helper import Helper
-import pytest
-TESTED_MODULE = xfconf_info.__name__
-
-
-@pytest.fixture
-def patch_xfconf_info(mocker):
- """
- Function used for mocking some parts of redhat_subscription module
- """
- mocker.patch('ansible_collections.community.general.plugins.module_utils.mh.module_helper.AnsibleModule.get_bin_path',
- return_value='/testbin/xfconf-query')
-
-
-TEST_CASES = [
- [
- {'channel': 'xfwm4', 'property': '/general/inactive_opacity'},
- {
- 'id': 'test_simple_property_get',
- 'run_command.calls': [
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfwm4', '--property', '/general/inactive_opacity'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- # Mock of returned code, stdout and stderr
- (0, '100\n', '',),
- ),
- ],
- 'is_array': False,
- 'value': '100',
- }
- ],
- [
- {'channel': 'xfwm4', 'property': '/general/i_dont_exist'},
- {
- 'id': 'test_simple_property_get_nonexistent',
- 'run_command.calls': [
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfwm4', '--property', '/general/i_dont_exist'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- # Mock of returned code, stdout and stderr
- (1, '', 'Property "/general/i_dont_exist" does not exist on channel "xfwm4".\n',),
- ),
- ],
- 'is_array': False,
- }
- ],
- [
- {'property': '/general/i_dont_exist'},
- {
- 'id': 'test_property_no_channel',
- 'run_command.calls': [],
- }
- ],
- [
- {'channel': 'xfwm4', 'property': '/general/workspace_names'},
- {
- 'id': 'test_property_get_array',
- 'run_command.calls': [
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--channel', 'xfwm4', '--property', '/general/workspace_names'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- # Mock of returned code, stdout and stderr
- (0, 'Value is an array with 3 items:\n\nMain\nWork\nTmp\n', '',),
- ),
- ],
- 'is_array': True,
- 'value_array': ['Main', 'Work', 'Tmp'],
- },
- ],
- [
- {},
- {
- 'id': 'get_channels',
- 'run_command.calls': [
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--list'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- # Mock of returned code, stdout and stderr
- (0, 'Channels:\n a\n b\n c\n', '',),
- ),
- ],
- 'is_array': False,
- 'channels': ['a', 'b', 'c'],
- },
- ],
- [
- {'channel': 'xfwm4'},
- {
- 'id': 'get_properties',
- 'run_command.calls': [
- (
- # Calling of following command will be asserted
- ['/testbin/xfconf-query', '--list', '--channel', 'xfwm4'],
- # Was return code checked?
- {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': True},
- # Mock of returned code, stdout and stderr
- (0, '/general/wrap_cycle\n/general/wrap_layout\n/general/wrap_resistance\n/general/wrap_windows\n'
- '/general/wrap_workspaces\n/general/zoom_desktop\n', '',),
- ),
- ],
- 'is_array': False,
- 'properties': [
- '/general/wrap_cycle',
- '/general/wrap_layout',
- '/general/wrap_resistance',
- '/general/wrap_windows',
- '/general/wrap_workspaces',
- '/general/zoom_desktop',
- ],
- },
- ],
-]
-TEST_CASES_IDS = [item[1]['id'] for item in TEST_CASES]
-
-
-@pytest.mark.parametrize('patch_ansible_module, testcase',
- TEST_CASES,
- ids=TEST_CASES_IDS,
- indirect=['patch_ansible_module'])
-@pytest.mark.usefixtures('patch_ansible_module')
-def test_xfconf_info(mocker, capfd, patch_xfconf_info, testcase):
- """
- Run unit tests for test cases listen in TEST_CASES
- """
-
- # Mock function used for running commands first
- call_results = [item[2] for item in testcase['run_command.calls']]
- mock_run_command = mocker.patch(
- 'ansible_collections.community.general.plugins.module_utils.mh.module_helper.AnsibleModule.run_command',
- side_effect=call_results)
-
- # Try to run test case
- with pytest.raises(SystemExit):
- xfconf_info.main()
-
- out, err = capfd.readouterr()
- results = json.loads(out)
- print("testcase =\n%s" % testcase)
- print("results =\n%s" % results)
-
- for conditional_test_result in ('value_array', 'value', 'is_array', 'properties', 'channels'):
- if conditional_test_result in testcase:
- assert conditional_test_result in results, "'{0}' not found in {1}".format(conditional_test_result, results)
- assert results[conditional_test_result] == testcase[conditional_test_result], \
- "'{0}': '{1}' != '{2}'".format(conditional_test_result, results[conditional_test_result], testcase[conditional_test_result])
-
- assert mock_run_command.call_count == len(testcase['run_command.calls'])
- if mock_run_command.call_count:
- call_args_list = [(item[0][0], item[1]) for item in mock_run_command.call_args_list]
- expected_call_args_list = [(item[0], item[1]) for item in testcase['run_command.calls']]
- print("call args list =\n%s" % call_args_list)
- print("expected args list =\n%s" % expected_call_args_list)
- assert call_args_list == expected_call_args_list
+Helper.from_module(xfconf_info, __name__)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf_info.yaml b/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf_info.yaml
new file mode 100644
index 000000000..519a87fdb
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_xfconf_info.yaml
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+---
+- id: test_simple_property_get
+ input:
+ channel: xfwm4
+ property: /general/inactive_opacity
+ output:
+ value: '100'
+ is_array: false
+ run_command_calls:
+ - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity]
+ environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true}
+ rc: 0
+ out: "100\n"
+ err: ""
+- id: test_simple_property_get_nonexistent
+ input:
+ channel: xfwm4
+ property: /general/i_dont_exist
+ output: {}
+ run_command_calls:
+ - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/i_dont_exist]
+ environ: *env-def
+ rc: 1
+ out: ""
+ err: 'Property "/general/i_dont_exist" does not exist on channel "xfwm4".\n'
+- id: test_property_no_channel
+ input:
+ property: /general/i_dont_exist
+ output:
+ failed: true
+ msg: "missing parameter(s) required by 'property': channel"
+ run_command_calls: []
+- id: test_property_get_array
+ input:
+ channel: xfwm4
+ property: /general/workspace_names
+ output:
+ is_array: true
+ value_array: [Main, Work, Tmp]
+ run_command_calls:
+ - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names]
+ environ: *env-def
+ rc: 0
+ out: "Value is an array with 3 items:\n\nMain\nWork\nTmp\n"
+ err: ""
+- id: get_channels
+ input: {}
+ output:
+ channels: [a, b, c]
+ run_command_calls:
+ - command: [/testbin/xfconf-query, --list]
+ environ: *env-def
+ rc: 0
+ out: "Channels:\n a\n b\n c\n"
+ err: ""
+- id: get_properties
+ input:
+ channel: xfwm4
+ output:
+ properties:
+ - /general/wrap_cycle
+ - /general/wrap_layout
+ - /general/wrap_resistance
+ - /general/wrap_windows
+ - /general/wrap_workspaces
+ - /general/zoom_desktop
+ run_command_calls:
+ - command: [/testbin/xfconf-query, --list, --channel, xfwm4]
+ environ: *env-def
+ rc: 0
+ out: |
+ /general/wrap_cycle
+ /general/wrap_layout
+ /general/wrap_resistance
+ /general/wrap_windows
+ /general/wrap_workspaces
+ /general/zoom_desktop
+ err: ""
diff --git a/ansible_collections/community/general/tests/unit/requirements.txt b/ansible_collections/community/general/tests/unit/requirements.txt
index 0aa7c1fc9..218fe4567 100644
--- a/ansible_collections/community/general/tests/unit/requirements.txt
+++ b/ansible_collections/community/general/tests/unit/requirements.txt
@@ -6,10 +6,12 @@ unittest2 ; python_version < '2.7'
importlib ; python_version < '2.7'
# requirement for the memcached cache plugin
-python-memcached
+python-memcached < 1.60 ; python_version < '3.6'
+python-memcached ; python_version >= '3.6'
# requirement for the redis cache plugin
redis
+async-timeout ; python_version == '3.11'
# requirement for the linode module
linode-python # APIv3
@@ -43,4 +45,12 @@ dataclasses ; python_version == '3.6'
elastic-apm ; python_version >= '3.6'
# requirements for scaleway modules
-passlib[argon2]
\ No newline at end of file
+passlib[argon2]
+
+# requirements for the proxmox modules
+proxmoxer < 2.0.0 ; python_version >= '2.7' and python_version <= '3.6'
+proxmoxer ; python_version > '3.6'
+
+# requirements for the nomad_token modules
+python-nomad < 2.0.0 ; python_version <= '3.6'
+python-nomad >= 2.0.0 ; python_version >= '3.7'